Compare commits: da8c5db890...main (4 commits)

Commits: 1b7bfcddbf, 2d6c3bff56, c9f25ea149, 3f2f14ac66
@@ -11,6 +11,8 @@ You are the **DevOps Engineer** responsible for infrastructure, deployment, and
 - Configure Docker containerization for Job Forge prototype
 - Handle server deployment and resource optimization
 - Manage AI API key security and configuration
+- **MANDATORY**: All Docker files must be stored in `docker/` folder
+- **MANDATORY**: Document deployment issues and solutions in `docs/lessons-learned/`
 
 ### 2. Deployment Pipeline for Prototyping
 - Simple deployment pipeline for server hosting
@@ -18,6 +18,8 @@ You are the **Senior Full-Stack Developer** responsible for implementing both Fa
 - Connect frontend to FastAPI backend APIs
 - Create intuitive job application management interfaces
 - Optimize for performance and user experience
+- **MANDATORY**: Follow clean project structure (only source code in `src/`)
+- **MANDATORY**: Document any issues encountered in `docs/lessons-learned/`
 
 ## Technology Stack - Job Forge
 
@@ -18,6 +18,8 @@ You are the **QA Engineer** responsible for ensuring high-quality software deliv
 - Database RLS policy testing
 - AI service integration testing with mocks
 - Performance testing for concurrent users
+- **MANDATORY**: All test files must be in `tests/` directory only
+- **MANDATORY**: Document test failures and solutions in `docs/lessons-learned/`
 
 ### 3. Manual Testing & Validation
 - Exploratory testing for job application workflows
@@ -19,12 +19,14 @@ You are the **Technical Lead** responsible for architecture decisions, code qual
 - Review and approve major architectural changes
 - Ensure security best practices for job application data
 
-### 3. Quality Assurance
+### 3. Quality Assurance & Project Structure
 - Python code review standards
 - pytest testing strategy
 - FastAPI performance requirements
 - Multi-tenant security guidelines
 - AI integration documentation standards
+- **MANDATORY**: Enforce clean project structure (only necessary files in root)
+- **MANDATORY**: Document all issues in `docs/lessons-learned/` with solutions
 
 ## Technology Stack - Job Forge
 
@@ -10,15 +10,21 @@
     "deny": []
   },
   "project": {
-    "name": "SaaS Development Project",
+    "name": "Job Forge Project",
     "type": "web-application",
-    "tech_stack": ["Node.js", "React", "TypeScript", "PostgreSQL"]
+    "tech_stack": [
+      "python3.12",
+      "fastapi",
+      "dash",
+      "mantine",
+      "postgresql"
+    ]
   },
   "team": {
     "main_orchestrator": "CLAUDE.md",
     "specialist_agents": [
       "agents/technical-lead.md",
       "agents/full-stack-developer.md",
       "agents/devops.md",
       "agents/qa.md"
     ]
@@ -35,7 +41,11 @@
     "default_agent": "CLAUDE.md"
   },
   "development": {
-    "environments": ["development", "staging", "production"],
+    "environments": [
+      "development",
+      "staging",
+      "production"
+    ],
     "testing_required": true,
     "code_review_required": true
   },
.claude/tools/agent_cache_wrapper.py (new file)
@@ -0,0 +1,115 @@
# .claude/tools/agent_cache_wrapper.py
"""
Cache wrapper for AI agents
Use this in your agent workflows to add caching
"""

import os
import sys
from pathlib import Path

# Add the tools directory to Python path
tools_dir = Path(__file__).parent
sys.path.insert(0, str(tools_dir))

from local_cache_client import (
    get_cache,
    cached_ai_query,
    store_ai_response,
    print_cache_stats,
)


class AgentCacheWrapper:
    """Wrapper for agent AI calls with caching support."""

    def __init__(self, agent_type: str, project: str = None):  # type: ignore
        self.agent_type = agent_type
        self.project = project or os.getenv("AI_CACHE_PROJECT", "job_forge")
        self.cache = get_cache()

        print(f"🤖 {agent_type.title()} Agent initialized with caching")

    def query_with_cache(self, prompt: str, make_ai_call_func=None) -> str:
        """
        Query with cache support.

        Args:
            prompt: The prompt to send
            make_ai_call_func: Function to call if cache miss (should return AI response)

        Returns:
            AI response (from cache or fresh API call)
        """
        # Try cache first
        cached_response, was_hit = cached_ai_query(
            prompt, self.agent_type, self.project
        )

        if was_hit:
            return cached_response  # type: ignore

        # Cache miss - make AI call
        if make_ai_call_func:
            print(f"🤖 Making fresh AI call for {self.agent_type}...")
            ai_response = make_ai_call_func(prompt)

            # Store in cache for next time
            if ai_response:
                store_ai_response(prompt, ai_response, self.agent_type, self.project)

            return ai_response
        else:
            print(f"⚠️ No AI call function provided for cache miss")
            return None  # type: ignore

    def store_response(self, prompt: str, response: str):
        """Manually store a response in cache."""
        store_ai_response(prompt, response, self.agent_type, self.project)

    def get_stats(self):
        """Get cache statistics for this session."""
        return self.cache.get_stats()


# Convenience functions for each agent type
def technical_lead_query(prompt: str, ai_call_func=None) -> str:
    """Technical Lead agent with caching."""
    wrapper = AgentCacheWrapper("technical_lead")
    return wrapper.query_with_cache(prompt, ai_call_func)


def qa_engineer_query(prompt: str, ai_call_func=None) -> str:
    """QA Engineer agent with caching."""
    wrapper = AgentCacheWrapper("qa_engineer")
    return wrapper.query_with_cache(prompt, ai_call_func)


def devops_engineer_query(prompt: str, ai_call_func=None) -> str:
    """DevOps Engineer agent with caching."""
    wrapper = AgentCacheWrapper("devops_engineer")
    return wrapper.query_with_cache(prompt, ai_call_func)


def fullstack_developer_query(prompt: str, ai_call_func=None) -> str:
    """Full-Stack Developer agent with caching."""
    wrapper = AgentCacheWrapper("fullstack_developer")
    return wrapper.query_with_cache(prompt, ai_call_func)


# Example usage and testing
if __name__ == "__main__":
    # Example AI call function (replace with your actual Claude Code integration)
    def example_ai_call(prompt):
        # This is where you'd call Claude Code or your AI service
        # For testing, return a mock response
        return f"Mock AI response for: {prompt[:50]}..."

    # Test with Technical Lead
    response = technical_lead_query(
        "What is the current FastAPI project structure?", example_ai_call
    )
    print(f"Response: {response}")

    # Print stats
    print_cache_stats()
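As a usage illustration (not part of the changeset above), here is a minimal sketch of wiring one of the convenience helpers into an agent script. `call_claude` is a hypothetical placeholder for whatever actually sends the prompt to the AI service, and the import assumes `.claude/tools/` is on the Python path:

```python
from agent_cache_wrapper import AgentCacheWrapper, devops_engineer_query


def call_claude(prompt: str) -> str:
    # Placeholder only - swap in the real Claude Code / API call here.
    return f"(stub response for: {prompt[:40]}...)"


# A first, unseen prompt misses the cache, invokes call_claude, and stores the result;
# repeating the same prompt later is answered from the n8n cache instead.
answer = devops_engineer_query("How do I rebuild the backend container?", call_claude)
print(answer)

# Session hit/miss counters are exposed through the wrapper's cache client.
print(AgentCacheWrapper("devops_engineer").get_stats())
```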
.claude/tools/local_cache_client.py (new file)
@@ -0,0 +1,307 @@
# .claude/tools/local_cache_client.py
"""
AI Cache Client for Local Development
Integrates with n8n-based AI response caching system
"""

import requests
import json
import os
import hashlib
import time
from typing import Optional, Dict, Any
from datetime import datetime


class AICacheClient:
    """Client for interacting with AI Cache MCP service."""

    def __init__(self, base_url: str = None, enabled: bool = True):  # type: ignore
        # Default to your n8n webhook URL
        self.base_url = base_url or os.getenv(
            "AI_CACHE_URL", "https://n8n.hotserv.cloud/webhook"
        )
        self.enabled = (
            enabled and os.getenv("AI_CACHE_ENABLED", "true").lower() == "true"
        )
        self.timeout = int(os.getenv("AI_CACHE_TIMEOUT", "15"))

        # Stats tracking
        self.session_hits = 0
        self.session_misses = 0
        self.session_start = time.time()
        self.connection_failed = False

        if self.enabled:
            print(f"🧠 AI Cache enabled: {self.base_url}")
            self._test_connection()
        else:
            print("⚠️ AI Cache disabled")

    def _test_connection(self):
        """Test if the cache service is accessible."""
        try:
            response = requests.get(
                f"{self.base_url}/ai-cache-stats",
                timeout=3  # Quick test
            )
            if response.status_code == 200:
                print("✅ Cache service is accessible")
            else:
                print(f"⚠️ Cache service returned HTTP {response.status_code}")
                self.connection_failed = True
        except Exception as e:
            print(f"❌ Cache service unreachable: {str(e)[:50]}...")
            self.connection_failed = True

    def _normalize_prompt(self, prompt: str) -> str:
        """Normalize prompt for consistent matching."""
        return prompt.strip().lower().replace("\n", " ").replace("  ", " ")

    def lookup_cache(
        self, prompt: str, agent_type: str, project: str = "job_forge"
    ) -> Optional[str]:
        """Look up a cached AI response."""
        if not self.enabled or self.connection_failed:
            return None

        try:
            start_time = time.time()

            response = requests.post(
                f"{self.base_url}/ai-cache-lookup",
                json={"prompt": prompt, "agent_type": agent_type, "project": project},
                timeout=self.timeout,
            )

            lookup_time = (time.time() - start_time) * 1000

            if response.status_code == 200:
                try:
                    # Debug: print raw response
                    raw_text = response.text
                    print(f"🔍 Debug - Raw response: '{raw_text[:100]}...'")

                    if not raw_text.strip():
                        print(f"❌ Cache MISS [{agent_type}] - Empty response | Lookup: {lookup_time:.0f}ms")
                        self.session_misses += 1
                        return None

                    data = response.json()
                    if data.get("found"):
                        similarity = data.get("similarity", 1.0)
                        hit_count = data.get("hit_count", 1)

                        print(
                            f"✅ Cache HIT! [{agent_type}] Similarity: {similarity:.2f} | Used: {hit_count}x | Lookup: {lookup_time:.0f}ms"
                        )
                        self.session_hits += 1
                        return data.get("response")
                    else:
                        print(f"❌ Cache MISS [{agent_type}] | Lookup: {lookup_time:.0f}ms")
                        self.session_misses += 1
                        return None
                except json.JSONDecodeError as e:
                    print(f"🚨 JSON decode error: {str(e)} | Response: '{response.text[:50]}'")
                    self.session_misses += 1
                    return None
            else:
                print(f"⚠️ Cache lookup failed: HTTP {response.status_code}")
                return None

        except requests.exceptions.Timeout:
            print(f"⏱️ Cache lookup timeout ({self.timeout}s)")
            return None
        except Exception as e:
            print(f"🚨 Cache error: {str(e)}")
            return None

    def store_cache(
        self,
        prompt: str,
        response: str,
        agent_type: str,
        ai_service: str = "claude",
        model: str = "claude-sonnet-4",
        project: str = "job_forge",
    ) -> bool:
        """Store an AI response in cache."""
        if not self.enabled or not response or len(response.strip()) < 10:
            return False

        try:
            start_time = time.time()

            result = requests.post(
                f"{self.base_url}/ai-cache-store",
                json={
                    "prompt": prompt,
                    "response": response,
                    "ai_service": ai_service,
                    "model": model,
                    "agent_type": agent_type,
                    "project": project,
                },
                timeout=self.timeout,
            )

            store_time = (time.time() - start_time) * 1000

            if result.status_code == 200:
                data = result.json()
                if data.get("success"):
                    print(
                        f"💾 Response cached [{agent_type}] | Store: {store_time:.0f}ms"
                    )
                    return True
                else:
                    print(
                        f"📄 Already cached [{agent_type}] | Store: {store_time:.0f}ms"
                    )
                    return False
            else:
                print(f"⚠️ Cache store failed: HTTP {result.status_code}")
                return False

        except requests.exceptions.Timeout:
            print(f"⏱️ Cache store timeout ({self.timeout}s)")
            return False
        except Exception as e:
            print(f"🚨 Cache store error: {str(e)}")
            return False

    def get_stats(self) -> Dict[str, Any]:
        """Get cache statistics."""
        try:
            response = requests.get(
                f"{self.base_url}/ai-cache-stats", timeout=self.timeout
            )

            if response.status_code == 200:
                stats = response.json()

                # Add session stats
                session_time = time.time() - self.session_start
                session_total = self.session_hits + self.session_misses
                session_hit_rate = (
                    (self.session_hits / session_total * 100)
                    if session_total > 0
                    else 0
                )

                stats["session_stats"] = {
                    "hits": self.session_hits,
                    "misses": self.session_misses,
                    "total": session_total,
                    "hit_rate_percentage": round(session_hit_rate, 1),
                    "duration_minutes": round(session_time / 60, 1),
                }

                return stats
            else:
                return {"error": f"Failed to get stats: {response.status_code}"}

        except Exception as e:
            return {"error": f"Stats error: {str(e)}"}

    def print_session_summary(self):
        """Print session cache performance summary."""
        total = self.session_hits + self.session_misses
        if total == 0:
            return

        hit_rate = (self.session_hits / total) * 100
        session_time = (time.time() - self.session_start) / 60

        print(f"\n📊 Cache Session Summary:")
        print(
            f" Hits: {self.session_hits} | Misses: {self.session_misses} | Hit Rate: {hit_rate:.1f}%"
        )
        print(f" Session Time: {session_time:.1f} minutes")

        if hit_rate > 60:
            print(f" 🎉 Excellent cache performance!")
        elif hit_rate > 30:
            print(f" 👍 Good cache performance")
        else:
            print(f" 📈 Cache is learning your patterns...")


# Global cache instance
_cache_instance = None


def get_cache() -> AICacheClient:
    """Get or create global cache instance."""
    global _cache_instance
    if _cache_instance is None:
        _cache_instance = AICacheClient()
    return _cache_instance


def cached_ai_query(
    prompt: str, agent_type: str, project: str = "job_forge"
) -> tuple[Optional[str], bool]:
    """
    Helper function for cached AI queries.
    Returns: (cached_response, was_cache_hit)
    """
    cache = get_cache()
    cached_response = cache.lookup_cache(prompt, agent_type, project)

    if cached_response:
        return cached_response, True
    else:
        return None, False


def store_ai_response(
    prompt: str, response: str, agent_type: str, project: str = "job_forge"
):
    """Helper function to store AI responses."""
    cache = get_cache()
    cache.store_cache(prompt, response, agent_type, project=project)


def print_cache_stats():
    """Print current cache statistics."""
    cache = get_cache()
    stats = cache.get_stats()

    if "error" in stats:
        print(f"❌ {stats['error']}")
        return

    summary = stats.get("summary", {})
    session = stats.get("session_stats", {})

    print(f"\n📈 AI Cache Statistics:")
    print(f" Overall Hit Rate: {summary.get('hit_rate_percentage', 0)}%")
    print(f" Total Saved: ${summary.get('total_cost_saved_usd', 0):.2f}")
    print(f" API Calls Saved: {summary.get('api_calls_saved', 0)}")

    if session:
        print(
            f" This Session: {session['hits']}/{session['total']} hits ({session['hit_rate_percentage']}%)"
        )


# Example usage for testing
if __name__ == "__main__":
    # Test the cache
    cache = get_cache()

    # Test lookup
    result = cache.lookup_cache("What is the database schema?", "technical_lead")
    print(f"Lookup result: {result}")

    # Test store
    cache.store_cache(
        "What is the database schema?",
        "PostgreSQL with users and applications tables",
        "technical_lead",
    )

    # Print stats
    print_cache_stats()
    cache.print_session_summary()
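The client's behaviour is driven entirely by environment variables read when `AICacheClient` is constructed (`AI_CACHE_URL`, `AI_CACHE_ENABLED`, `AI_CACHE_TIMEOUT`, plus `AI_CACHE_PROJECT` used by the wrapper above). A small sketch of overriding them before the first `get_cache()` call; the URL below is an example value, not one used by this project:

```python
import os

# Point the client at a different n8n webhook base (example URL, not from this changeset).
os.environ["AI_CACHE_URL"] = "https://n8n.example.internal/webhook"
os.environ["AI_CACHE_TIMEOUT"] = "10"      # per-request timeout in seconds
os.environ["AI_CACHE_PROJECT"] = "job_forge"
# Set to "false" to make lookup_cache()/store_cache() short-circuit without HTTP calls.
os.environ["AI_CACHE_ENABLED"] = "true"

from local_cache_client import get_cache  # variables are read when the client is constructed

cache = get_cache()
print(cache.enabled, cache.base_url, cache.timeout)
```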
.gitignore (vendored)
@@ -162,10 +162,16 @@ Thumbs.db
 # User data and uploads
 user_data/
 uploads/
+documents/
 
 # AI model cache
 .cache/
 models/
+ai_cache/
+embeddings_cache/
+
+# Database volumes
+postgres_data/
 
 # Spyder project settings
 .spyderproject
CLAUDE.md
@@ -332,13 +332,76 @@ Your specialized development team is ready to deliver the Job Forge AI-powered j
 
 **Start building your Job Forge prototype!** 🚀
 
-# Documentation Structure
-All project documentation is centralized in the `docs/` folder. See [README.md](README.md) for complete documentation navigation.
+## Project Structure and Organization
+### 📁 Clean Project Structure Requirements
+**MANDATORY**: Only necessary files should be stored in the project root folder. All supporting files must be organized into appropriate subdirectories:
+
+```
+job-forge/
+├── src/                  # Source code only
+├── tests/                # Test files only
+├── docs/                 # All documentation
+├── docker/               # All Docker-related files
+├── database/             # Database scripts and migrations
+├── .env.example          # Environment template
+├── requirements-*.txt    # Python dependencies
+├── pytest.ini            # Test configuration
+└── README.md             # Main project readme
+```
+
+### 🔧 Docker Files Organization
+All Docker-related files are stored in the `docker/` folder:
+- `docker/docker-compose.yml` - Main orchestration file
+- `docker/Dockerfile.backend` - Backend container definition
+- `docker/Dockerfile.frontend` - Frontend container definition
+
+**Usage**: Run `cd docker && docker compose up -d` to start the environment.
+
+### 📚 Documentation Structure Requirements
+All project documentation is centralized in the `docs/` folder:
+- `docs/lessons-learned/` - **MANDATORY**: All project issues and solutions
+- `docs/api_specification.md` - API documentation
+- `docs/database_design.md` - Database schema and design
+- `docs/development/` - Development guides and standards
+
+### 📝 Lessons Learned Process
+**MANDATORY**: For every issue encountered during development:
+1. Create a new markdown file in `docs/lessons-learned/`
+2. Use format: `###-issue-name.md` (where ### is sequential number)
+3. Include: Issue name, description, error messages, root cause, solution, prevention strategy
+4. Reference the lesson learned in relevant documentation
+
+### 🏗️ Development Environment Setup
+```bash
+# 1. Clone and navigate to project
+git clone <repository>
+cd job-forge
+
+# 2. Set up environment variables
+cp .env.example .env
+# Edit .env with your API keys
+
+# 3. Start development environment
+cd docker
+docker compose up -d
+
+# 4. Access applications
+# Frontend: http://localhost:8501
+# Backend: http://localhost:8000
+# Database: localhost:5432
+```
 
 # Important Instructions
+- **Clean Structure**: Only necessary files in project root
+- **Docker Organization**: All Docker files in `docker/` folder
+- **Lessons Learned**: Document all issues in `docs/lessons-learned/`
 - Focus on Python/FastAPI backend implementation
 - Use Dash + Mantine for frontend components
 - Prioritize core job application workflows
 - Maintain deployable prototype state
 - Ensure AI service integration reliability
 - Follow established quality gates for all features
+
+## Project Memories
+- Save files in an organized manner in the project folder to keep it clear and maintainable
README.md
@@ -9,16 +9,38 @@
 
 ## 🚀 Quick Start
 
+### Docker Development (Recommended)
 ```bash
 # Clone the repository
 git clone https://github.com/yourusername/job-forge.git
 cd job-forge
 
-# Start development environment
-docker-compose up -d
+# Set up environment variables
+cp .env.example .env
+# Edit .env with your API keys (Claude, OpenAI, JWT secret)
 
-# Access the application
-open http://localhost:8000
+# Start development environment
+cd docker
+docker compose up -d
+
+# Access the applications
+# Frontend: http://localhost:8501
+# Backend API: http://localhost:8000
+# Database: localhost:5432
+```
+
+### Local Development Setup
+```bash
+# For local development and testing
+pip install -r requirements.txt
+
+# For development dependencies only
+pip install -r dev-requirements.txt
+
+# Run tests locally
+python validate_tests.py    # Validate test structure
+python run_tests.py         # Run API tests against Docker environment
+pytest                      # Run full pytest suite (requires local services)
 ```
 
 ## 📚 Documentation Navigation
dev-requirements.txt (new file)
@@ -0,0 +1,33 @@
# Job Forge - Development and Testing Requirements
# Install with: pip install -r dev-requirements.txt

# Testing framework
pytest==8.0.2
pytest-asyncio==0.23.5
pytest-cov==4.0.0
pytest-mock==3.12.0
pytest-dash==2.1.2

# Code quality
black==24.2.0
isort==5.13.2
flake8==7.0.0
mypy==1.8.0

# Security testing
bandit==1.7.7

# Core dependencies for testing
structlog==24.1.0
sqlalchemy[asyncio]==2.0.29
fastapi==0.109.2
httpx==0.27.0
python-dotenv==1.0.1

# Authentication testing
python-jose[cryptography]==3.3.0
passlib[bcrypt]==1.7.4

# Database testing
asyncpg==0.29.0
psycopg2-binary==2.9.9
docker/Dockerfile.backend (new file)
@@ -0,0 +1,28 @@
FROM python:3.12-slim

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements and install Python dependencies
COPY requirements-backend.txt .
RUN pip install --no-cache-dir -r requirements-backend.txt

# Copy source code
COPY src/ ./src/

# Create non-root user
RUN useradd -m -u 1000 jobforge && chown -R jobforge:jobforge /app
USER jobforge

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

EXPOSE 8000

CMD ["uvicorn", "src.backend.main:app", "--host", "0.0.0.0", "--port", "8000"]
docker/Dockerfile.frontend (new file)
@@ -0,0 +1,23 @@
FROM python:3.12-slim

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements and install Python dependencies
COPY requirements-frontend.txt .
RUN pip install --no-cache-dir -r requirements-frontend.txt

# Copy source code
COPY src/ ./src/

# Create non-root user
RUN useradd -m -u 1000 jobforge && chown -R jobforge:jobforge /app
USER jobforge

EXPOSE 8501

CMD ["python", "src/frontend/main.py"]
docker/docker-compose.yml
@@ -12,7 +12,7 @@ services:
       - "5432:5432"
     volumes:
       - postgres_data:/var/lib/postgresql/data
-      - ./database/init.sql:/docker-entrypoint-initdb.d/init.sql
+      - ../database/init.sql:/docker-entrypoint-initdb.d/init.sql
     healthcheck:
       test: ["CMD-SHELL", "pg_isready -U jobforge_user -d jobforge_mvp"]
       interval: 30s
@@ -21,8 +21,8 @@ services:
 
   backend:
     build:
-      context: .
-      dockerfile: Dockerfile.backend
+      context: ..
+      dockerfile: docker/Dockerfile.backend
     container_name: jobforge_backend
     ports:
       - "8000:8000"
@@ -34,7 +34,7 @@ services:
       - DEBUG=true
       - LOG_LEVEL=INFO
     volumes:
-      - ./src:/app/src
+      - ../src:/app/src
     depends_on:
       postgres:
         condition: service_healthy
@@ -47,19 +47,19 @@ services:
 
   frontend:
     build:
-      context: .
-      dockerfile: Dockerfile.frontend
+      context: ..
+      dockerfile: docker/Dockerfile.frontend
     container_name: jobforge_frontend
     ports:
       - "8501:8501"
     environment:
       - BACKEND_URL=http://backend:8000
     volumes:
-      - ./src/frontend:/app/src/frontend
+      - ../src/frontend:/app/src/frontend
     depends_on:
       backend:
         condition: service_healthy
-    command: python src/frontend/main.py
+    command: sh -c "cd src/frontend && python main.py"
 
 volumes:
   postgres_data:
docs/lessons-learned/001-dependency-version-conflicts.md (new file)
@@ -0,0 +1,49 @@
# Lesson Learned #001: Dependency Version Conflicts

## Issue Name
Dependency Version Conflicts in Requirements Files

## Date
2025-08-02

## Description
During project setup, encountered version conflicts with Python package dependencies:
- `pytest-dash==2.5.0` - Version did not exist in PyPI (max available: 2.1.2)
- `python-bcrypt==4.1.2` - Incorrect package name (should be `bcrypt==4.1.2`)

## Error Messages
```
ERROR: Could not find a version that satisfies the requirement pytest-dash==2.5.0
(from versions: 0.1.0, 0.1.1, 0.1.2, 0.1.3, 0.2.0rc1, 0.2.0rc2, 0.2.0rc3, 1.0.0, 1.0.1, 1.1.0, 2.0.0rc1, 2.0.0rc2, 2.0.0rc3, 2.0.0rc4, 2.0.0rc5, 2.0.0, 2.1.0, 2.1.1, 2.1.2)

ERROR: Could not find a version that satisfies the requirement python-bcrypt==4.1.2
(from versions: 0.3.1, 0.3.2)
```

## Root Cause
1. Incorrect package versions specified without checking PyPI availability
2. Wrong package name used for bcrypt library

## Solution Applied
1. **Updated pytest-dash version**:
   - Changed from `pytest-dash==2.5.0` to `pytest-dash==2.1.2`
   - Verified latest available version on PyPI

2. **Fixed bcrypt package name**:
   - Changed from `python-bcrypt==4.1.2` to `bcrypt==4.1.2`
   - Used correct package name

## Files Modified
- `requirements-backend.txt` - Fixed bcrypt package name
- `requirements-frontend.txt` - Updated pytest-dash version

## Prevention Strategy
1. Always verify package versions exist on PyPI before adding to requirements
2. Use `pip search` or check PyPI website for correct package names
3. Consider using version ranges instead of exact pins for non-critical dependencies
4. Implement CI/CD checks to validate requirements files

## Impact
- ✅ All dependencies now install successfully
- ✅ Project setup process is streamlined
- ✅ Development environment can be started without version conflicts
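The prevention strategy above can be automated with a small pre-flight check against PyPI's public JSON API; this sketch is illustrative and not part of the commit:

```python
import json
import urllib.error
import urllib.request


def pypi_has_version(package: str, version: str) -> bool:
    """Return True if the exact version of a package exists on PyPI."""
    url = f"https://pypi.org/pypi/{package}/json"
    try:
        with urllib.request.urlopen(url, timeout=10) as resp:
            releases = json.load(resp).get("releases", {})
    except urllib.error.HTTPError:
        return False  # the package name itself was not found
    return version in releases


print(pypi_has_version("pytest-dash", "2.5.0"))  # False - the pin that broke the install
print(pypi_has_version("bcrypt", "4.1.2"))       # True - the corrected pin
```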
docs/lessons-learned/002-project-structure-organization.md (new file)
@@ -0,0 +1,77 @@
# Lesson Learned #002: Project Structure Organization

## Issue Name
Project Structure Organization and Clean Root Directory

## Date
2025-08-02

## Description
Initial project setup had Docker configuration files scattered in the root directory, making the project structure cluttered and harder to navigate. This violated clean project organization principles.

## Root Cause
- Docker files (Dockerfile.backend, Dockerfile.frontend, docker-compose.yml) were placed in project root
- No established guidelines for file organization
- Lack of mandatory documentation for project issues

## Solution Applied
1. **Created organized folder structure**:
   ```
   job-forge/
   ├── src/                  # Source code only
   ├── tests/                # Test files only
   ├── docs/                 # All documentation
   ├── docker/               # All Docker-related files
   ├── database/             # Database scripts and migrations
   ├── .env.example          # Environment template
   ├── requirements-*.txt    # Python dependencies
   ├── pytest.ini            # Test configuration
   └── README.md             # Main project readme
   ```

2. **Moved Docker files to dedicated folder**:
   - Moved all Docker files to `docker/` directory
   - Updated docker-compose.yml paths to reference parent directory (`../`)
   - Updated project documentation to reflect new structure

3. **Created lessons-learned process**:
   - Created `docs/lessons-learned/` folder
   - Established mandatory documentation process for all issues
   - Added sequential numbering system for lesson learned entries

## Files Modified
- `docker/docker-compose.yml` - Updated paths for new structure
- `CLAUDE.md` - Added project structure requirements and lessons learned process
- `.claude/agents/*.md` - Updated all agent files with structure requirements
- `README.md` - Updated quick start instructions

## New Mandatory Requirements
1. **Clean Root Directory**: Only essential files in project root
2. **Docker Organization**: All Docker files in `docker/` folder
3. **Lessons Learned**: Document every issue in `docs/lessons-learned/`
4. **Sequential Documentation**: Use numbered format (###-issue-name.md)

## Prevention Strategy
1. Establish clear folder structure guidelines in project documentation
2. Add project structure validation to CI/CD if implemented
3. Regular project structure reviews during development
4. Mandatory issue documentation process for all team members

## Usage Instructions
```bash
# Start development environment from docker folder
cd docker
docker compose up -d

# Access applications
# Frontend: http://localhost:8501
# Backend: http://localhost:8000
# Database: localhost:5432
```

## Impact
- ✅ Clean, organized project structure
- ✅ Easier navigation and maintenance
- ✅ Established process for documenting project issues
- ✅ Better adherence to software engineering best practices
- ✅ Updated all team documentation and agent instructions
docs/lessons-learned/003-requirements-file-organization.md (new file)
@@ -0,0 +1,120 @@
# Lesson Learned #003: Requirements File Organization

## Issue Name
Missing main requirements.txt for local development and testing

## Description
After implementing comprehensive test coverage, tests could not be run locally due to missing dependencies. The project had separate `requirements-backend.txt` and `requirements-frontend.txt` files for Docker containers, but no unified requirements file for local development.

## Error Messages
```
ModuleNotFoundError: No module named 'structlog'
ModuleNotFoundError: No module named 'pytest'
```

## Root Cause
1. **Fragmented Dependencies**: Backend and frontend requirements were split into separate files for Docker optimization
2. **Missing Local Setup**: No unified requirements file for local development and testing
3. **Documentation Gap**: README didn't clearly explain how to install dependencies for local testing

## Solution Implemented

### 1. Created Main Requirements File
- **File**: `requirements.txt`
- **Purpose**: Combined all dependencies for local development
- **Content**: Merged backend and frontend requirements

### 2. Created Development Requirements File
- **File**: `dev-requirements.txt`
- **Purpose**: Testing and development dependencies only
- **Content**: pytest, black, flake8, mypy, and core dependencies needed for testing

### 3. Updated Documentation
- **File**: `README.md`
- **Section**: Quick Start
- **Addition**: Local development setup instructions with proper pip install commands

### 4. Maintained Docker Optimization
- **Approach**: Kept separate `requirements-backend.txt` and `requirements-frontend.txt` for Docker containers
- **Benefit**: Smaller container images with only necessary dependencies

## File Structure Created
```
job-forge/
├── requirements.txt           # All dependencies for local development
├── dev-requirements.txt       # Development and testing dependencies only
├── requirements-backend.txt   # Backend container dependencies (existing)
├── requirements-frontend.txt  # Frontend container dependencies (existing)
└── README.md                  # Updated with local setup instructions
```

## Prevention Strategy

### 1. Requirements File Standards
- **Main Requirements**: Always maintain a unified `requirements.txt` for local development
- **Development Requirements**: Separate `dev-requirements.txt` for testing tools
- **Container Requirements**: Keep optimized files for Docker containers

### 2. Documentation Requirements
- **Installation Instructions**: Clear pip install commands in README
- **Testing Setup**: Document how to run tests locally vs in containers
- **Dependencies Explanation**: Explain the purpose of each requirements file

### 3. Testing Integration
- **Local Testing**: Ensure tests can run with local pip-installed dependencies
- **Container Testing**: Maintain ability to test within Docker environment
- **CI/CD Integration**: Use appropriate requirements file for each environment

## Implementation Details

### Requirements.txt Content
```
# Combined requirements for local development
fastapi==0.109.2
uvicorn[standard]==0.27.1
# ... (all backend and frontend dependencies)
pytest==8.0.2
pytest-asyncio==0.23.5
# ... (all testing dependencies)
```

### Dev-Requirements.txt Content
```
# Development and testing only
pytest==8.0.2
pytest-asyncio==0.23.5
pytest-cov==4.0.0
black==24.2.0
# ... (minimal set for testing)
```

### README Update
```bash
# For local development and testing
pip install -r requirements.txt

# For development dependencies only
pip install -r dev-requirements.txt

# Run tests locally
python validate_tests.py
python run_tests.py
pytest
```

## Key Takeaways

1. **Multiple Requirements Files**: Different environments need different dependency sets
2. **Local Development Priority**: Always provide easy local setup for developers
3. **Documentation Clarity**: Clear installation instructions prevent frustration
4. **Container Optimization**: Keep container-specific requirements minimal and focused

## Status
✅ **RESOLVED** - Created unified requirements files and updated documentation

## Related Files
- `requirements.txt` (new)
- `dev-requirements.txt` (new)
- `README.md` (updated)
- `requirements-backend.txt` (existing, unchanged)
- `requirements-frontend.txt` (existing, unchanged)
requirements-backend.txt
@@ -12,7 +12,7 @@ psycopg2-binary==2.9.9
 # Authentication & Security
 python-jose[cryptography]==3.3.0
 passlib[bcrypt]==1.7.4
-python-bcrypt==4.1.2
+bcrypt==4.1.2
 
 # AI Services
 anthropic==0.21.3
@@ -23,7 +23,7 @@ pgvector==0.2.5
 numpy==1.26.4
 
 # Data validation
-pydantic==2.6.3
+pydantic[email]==2.6.3
 pydantic-settings==2.2.1
 
 # HTTP client
requirements-frontend.txt
@@ -11,11 +11,15 @@ httpx==0.27.0
 pandas==2.2.1
 plotly==5.18.0
 
+# File handling
+Pillow==10.2.0
+
 # Utilities
 python-dotenv==1.0.1
 structlog==24.1.0
 
 # Development
 pytest==8.0.2
+pytest-dash==2.1.2
 black==24.2.0
 isort==5.13.2
requirements.txt (new file)
@@ -0,0 +1,66 @@
# Job Forge - Combined requirements for local development and testing
# This file combines backend and frontend requirements for easy local setup

# FastAPI and web framework
fastapi==0.109.2
uvicorn[standard]==0.27.1
python-multipart==0.0.9

# Database
asyncpg==0.29.0
sqlalchemy[asyncio]==2.0.29
alembic==1.13.1
psycopg2-binary==2.9.9

# Authentication & Security
python-jose[cryptography]==3.3.0
passlib[bcrypt]==1.7.4
bcrypt==4.1.2

# AI Services
anthropic==0.21.3
openai==1.12.0

# Vector operations
pgvector==0.2.5
numpy==1.26.4

# Data validation
pydantic[email]==2.6.3
pydantic-settings==2.2.1

# HTTP client
httpx==0.27.0
aiohttp==3.9.3
requests==2.31.0

# Utilities
python-dotenv==1.0.1
structlog==24.1.0
tenacity==8.2.3

# Dash and frontend
dash==2.16.1
dash-mantine-components==0.12.1
dash-iconify==0.1.2

# Data handling
pandas==2.2.1
plotly==5.18.0

# File handling
Pillow==10.2.0

# Development & Testing
pytest==8.0.2
pytest-asyncio==0.23.5
pytest-cov==4.0.0
pytest-mock==3.12.0
pytest-dash==2.1.2
black==24.2.0
isort==5.13.2
flake8==7.0.0
mypy==1.8.0

# Security
bandit==1.7.7
src/__init__.py (new file)
@@ -0,0 +1 @@
# Job Forge source package

src/backend/__init__.py (new file, empty)
src/backend/api/__init__.py (new file, empty)
src/backend/api/ai_documents.py (new file)
@@ -0,0 +1,139 @@
"""
AI Document Generation API - Simple implementation for MVP
"""
from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel
from typing import Optional
import structlog

from ..services.ai_service import ai_service
from ..models.user import User
from .auth import get_current_user

logger = structlog.get_logger()
router = APIRouter()


class CoverLetterRequest(BaseModel):
    job_description: str
    company_name: str
    role_title: str
    job_url: Optional[str] = None
    user_resume: Optional[str] = None


class ResumeOptimizationRequest(BaseModel):
    current_resume: str
    job_description: str
    role_title: str


class DocumentResponse(BaseModel):
    content: str
    model_used: str
    generation_prompt: str


@router.post("/generate-cover-letter", response_model=DocumentResponse)
async def generate_cover_letter(
    request: CoverLetterRequest,
    current_user: User = Depends(get_current_user)
):
    """
    Generate a personalized cover letter using AI
    """
    try:
        logger.info("Generating cover letter",
                    user_id=str(current_user.id),
                    company=request.company_name,
                    role=request.role_title)

        result = await ai_service.generate_cover_letter(
            job_description=request.job_description,
            company_name=request.company_name,
            role_title=request.role_title,
            user_name=current_user.full_name,
            user_resume=request.user_resume
        )

        logger.info("Cover letter generated successfully",
                    user_id=str(current_user.id),
                    model_used=result["model_used"])

        return DocumentResponse(
            content=result["content"],
            model_used=result["model_used"],
            generation_prompt=result["prompt"]
        )

    except Exception as e:
        logger.error("Cover letter generation failed",
                     error=str(e),
                     user_id=str(current_user.id))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to generate cover letter"
        )


@router.post("/optimize-resume", response_model=DocumentResponse)
async def optimize_resume(
    request: ResumeOptimizationRequest,
    current_user: User = Depends(get_current_user)
):
    """
    Optimize resume for specific job requirements using AI
    """
    try:
        logger.info("Optimizing resume",
                    user_id=str(current_user.id),
                    role=request.role_title)

        result = await ai_service.generate_resume_optimization(
            current_resume=request.current_resume,
            job_description=request.job_description,
            role_title=request.role_title
        )

        logger.info("Resume optimized successfully",
                    user_id=str(current_user.id),
                    model_used=result["model_used"])

        return DocumentResponse(
            content=result["content"],
            model_used=result["model_used"],
            generation_prompt=result["prompt"]
        )

    except Exception as e:
        logger.error("Resume optimization failed",
                     error=str(e),
                     user_id=str(current_user.id))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to optimize resume"
        )


@router.post("/test-ai-connection")
async def test_ai_connection(current_user: User = Depends(get_current_user)):
    """
    Test if AI services are properly configured
    """
    status_info = {
        "claude_available": ai_service.claude_client is not None,
        "openai_available": ai_service.openai_client is not None,
        "user": current_user.full_name
    }

    # Test with a simple generation
    try:
        test_result = await ai_service.generate_cover_letter(
            job_description="Software Engineer position requiring Python skills",
            company_name="Test Company",
            role_title="Software Engineer",
            user_name=current_user.full_name
        )
        status_info["test_generation"] = "success"
        status_info["model_used"] = test_result["model_used"]
        status_info["content_preview"] = test_result["content"][:100] + "..."

    except Exception as e:
        status_info["test_generation"] = "failed"
        status_info["error"] = str(e)

    return status_info
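For reference, a sketch of exercising the cover-letter endpoint from a client with `httpx` (already pinned in the requirements above). The `/api/ai` prefix and the bearer-token format are assumptions - the router mounting and auth flow live outside this changeset:

```python
import httpx

payload = {
    "job_description": "Software Engineer position requiring Python skills",
    "company_name": "Test Company",
    "role_title": "Software Engineer",
}
# Hypothetical values: adjust the prefix and token to match how the router is mounted.
url = "http://localhost:8000/api/ai/generate-cover-letter"
headers = {"Authorization": "Bearer <access-token>"}

response = httpx.post(url, json=payload, headers=headers, timeout=60.0)
response.raise_for_status()
document = response.json()
print(document["model_used"])
print(document["content"][:200])
```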
src/backend/api/applications.py (new file)
@@ -0,0 +1,205 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from pydantic import BaseModel
from typing import List, Optional
from datetime import datetime

from ..core.database import get_db
from ..models.user import User
from ..models.application import Application, ApplicationStatus
from ..models.job import Job
from .auth import get_current_user

router = APIRouter()


class ApplicationCreate(BaseModel):
    job_id: int
    notes: Optional[str] = None


class ApplicationUpdate(BaseModel):
    status: Optional[ApplicationStatus] = None
    notes: Optional[str] = None
    applied_date: Optional[datetime] = None
    follow_up_date: Optional[datetime] = None


class ApplicationResponse(BaseModel):
    id: int
    job_id: int
    status: ApplicationStatus
    notes: Optional[str]
    applied_date: Optional[datetime]
    follow_up_date: Optional[datetime]
    created_at: datetime

    class Config:
        from_attributes = True


class ApplicationWithJobResponse(ApplicationResponse):
    job_title: str
    company: str
    location: Optional[str]


@router.post("/", response_model=ApplicationResponse)
async def create_application(
    application_data: ApplicationCreate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    # Verify job exists
    job = await db.get(Job, application_data.job_id)
    if not job:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Job not found"
        )

    # Check if application already exists
    existing = await db.execute(
        select(Application).where(
            Application.user_id == current_user.id,
            Application.job_id == application_data.job_id
        )
    )
    if existing.scalar_one_or_none():
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Application already exists for this job"
        )

    application = Application(
        user_id=current_user.id,
        job_id=application_data.job_id,
        notes=application_data.notes
    )

    db.add(application)
    await db.commit()
    await db.refresh(application)

    return ApplicationResponse.from_orm(application)


@router.get("/", response_model=List[ApplicationWithJobResponse])
async def get_applications(
    status: Optional[ApplicationStatus] = None,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    query = select(Application, Job).join(Job).where(Application.user_id == current_user.id)

    if status:
        query = query.where(Application.status == status)

    result = await db.execute(query)
    applications = []

    for app, job in result.all():
        app_dict = {
            "id": app.id,
            "job_id": app.job_id,
            "status": app.status,
            "notes": app.notes,
            "applied_date": app.applied_date,
            "follow_up_date": app.follow_up_date,
            "created_at": app.created_at,
            "job_title": job.title,
            "company": job.company,
            "location": job.location
        }
        applications.append(ApplicationWithJobResponse(**app_dict))

    return applications


@router.get("/{application_id}", response_model=ApplicationWithJobResponse)
async def get_application(
    application_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    result = await db.execute(
        select(Application, Job)
        .join(Job)
        .where(
            Application.id == application_id,
            Application.user_id == current_user.id
        )
    )
    app_job = result.first()

    if not app_job:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Application not found"
        )

    app, job = app_job
    return ApplicationWithJobResponse(
        id=app.id,
        job_id=app.job_id,
        status=app.status,
        notes=app.notes,
        applied_date=app.applied_date,
        follow_up_date=app.follow_up_date,
        created_at=app.created_at,
        job_title=job.title,
        company=job.company,
        location=job.location
    )


@router.put("/{application_id}", response_model=ApplicationResponse)
async def update_application(
    application_id: int,
    update_data: ApplicationUpdate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    application = await db.execute(
        select(Application).where(
            Application.id == application_id,
            Application.user_id == current_user.id
        )
    )
    application = application.scalar_one_or_none()

    if not application:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Application not found"
        )

    update_dict = update_data.dict(exclude_unset=True)
    if update_dict.get("status") == ApplicationStatus.APPLIED and not application.applied_date:
        update_dict["applied_date"] = datetime.utcnow()

    for field, value in update_dict.items():
        setattr(application, field, value)

    await db.commit()
    await db.refresh(application)

    return ApplicationResponse.from_orm(application)


@router.delete("/{application_id}")
async def delete_application(
    application_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    application = await db.execute(
        select(Application).where(
            Application.id == application_id,
            Application.user_id == current_user.id
        )
    )
    application = application.scalar_one_or_none()

    if not application:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Application not found"
        )

    await db.delete(application)
    await db.commit()

    return {"message": "Application deleted successfully"}
src/backend/api/auth.py (new file, +139 lines)

from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from sqlalchemy.ext.asyncio import AsyncSession
from pydantic import BaseModel, EmailStr
from typing import Optional
import uuid
import bcrypt
from jose import jwt, JWTError
from datetime import datetime, timedelta

from ..core.database import get_db
from ..core.config import settings
from ..models.user import User

router = APIRouter()
security = HTTPBearer()


class UserCreate(BaseModel):
    email: EmailStr
    password: str
    first_name: str
    last_name: str
    phone: Optional[str] = None


class UserLogin(BaseModel):
    email: EmailStr
    password: str


class Token(BaseModel):
    access_token: str
    token_type: str = "bearer"


class UserResponse(BaseModel):
    id: str
    email: str
    full_name: str
    first_name: str
    last_name: str
    is_active: bool

    @classmethod
    def from_user(cls, user):
        return cls(
            id=str(user.id),
            email=user.email,
            full_name=user.full_name,
            first_name=user.first_name,
            last_name=user.last_name,
            is_active=user.is_active
        )


def create_access_token(data: dict):
    to_encode = data.copy()
    expire = datetime.utcnow() + timedelta(minutes=settings.jwt_expire_minutes)
    to_encode.update({"exp": expire})
    return jwt.encode(to_encode, settings.jwt_secret_key, algorithm=settings.jwt_algorithm)


def verify_password(plain_password: str, hashed_password: str) -> bool:
    return bcrypt.checkpw(plain_password.encode('utf-8'), hashed_password.encode('utf-8'))


def hash_password(password: str) -> str:
    return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt()).decode('utf-8')


async def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(security),
    db: AsyncSession = Depends(get_db)
):
    try:
        payload = jwt.decode(
            credentials.credentials,
            settings.jwt_secret_key,
            algorithms=[settings.jwt_algorithm]
        )
        # "sub" is stored as str(user.id) at login time, so treat it as a string
        user_id: str = payload.get("sub")
        if user_id is None:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Invalid authentication credentials"
            )
    except JWTError:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid authentication credentials"
        )

    user = await db.get(User, user_id)
    if user is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not found"
        )
    return user


@router.post("/register", response_model=UserResponse)
async def register(user_data: UserCreate, db: AsyncSession = Depends(get_db)):
    # Check if user exists
    existing_user = await db.execute(
        User.__table__.select().where(User.email == user_data.email)
    )
    if existing_user.first():
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Email already registered"
        )

    # Create new user
    hashed_pwd = hash_password(user_data.password)
    full_name = f"{user_data.first_name} {user_data.last_name}"
    user = User(
        email=user_data.email,
        password_hash=hashed_pwd,
        full_name=full_name
    )

    db.add(user)
    await db.commit()
    await db.refresh(user)

    return UserResponse.from_user(user)


@router.post("/login", response_model=Token)
async def login(login_data: UserLogin, db: AsyncSession = Depends(get_db)):
    user_result = await db.execute(
        User.__table__.select().where(User.email == login_data.email)
    )
    user_row = user_result.first()

    if not user_row or not verify_password(login_data.password, user_row.password_hash):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect email or password"
        )

    access_token = create_access_token(data={"sub": str(user_row.id)})
    return Token(access_token=access_token)


@router.get("/me", response_model=UserResponse)
async def get_current_user_info(current_user: User = Depends(get_current_user)):
    return UserResponse.from_user(current_user)
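For reference, a minimal client-side sketch of these auth endpoints. It assumes the backend runs locally on port 8000 with the routers mounted under `/api/auth` as in `main.py`; the credentials are placeholders.

```python
import httpx

BASE = "http://localhost:8000/api/auth"  # assumed local dev URL

# Register, then log in and call the protected /me endpoint with the bearer token.
httpx.post(f"{BASE}/register", json={
    "email": "demo@example.com", "password": "s3cret!",
    "first_name": "Demo", "last_name": "User",
})
token = httpx.post(f"{BASE}/login", json={
    "email": "demo@example.com", "password": "s3cret!",
}).json()["access_token"]

me = httpx.get(f"{BASE}/me", headers={"Authorization": f"Bearer {token}"})
print(me.json())
```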
src/backend/api/documents.py (new file, +184 lines)

from fastapi import APIRouter, Depends, HTTPException, status, UploadFile, File
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from pydantic import BaseModel
from typing import List, Optional
import structlog

from ..core.database import get_db
from ..models.user import User
from ..models.document import Document, DocumentType
from .auth import get_current_user

logger = structlog.get_logger()
router = APIRouter()


class DocumentResponse(BaseModel):
    id: int
    filename: str
    document_type: DocumentType
    file_size: Optional[int]
    ai_generated: str
    created_at: str

    class Config:
        from_attributes = True


class DocumentCreate(BaseModel):
    filename: str
    document_type: DocumentType
    text_content: Optional[str] = None


@router.post("/upload", response_model=DocumentResponse)
async def upload_document(
    file: UploadFile = File(...),
    document_type: DocumentType = DocumentType.OTHER,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    try:
        file_content = await file.read()

        # Basic file validation
        if len(file_content) > 10 * 1024 * 1024:  # 10MB limit
            raise HTTPException(
                status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
                detail="File too large. Maximum size is 10MB."
            )

        document = Document(
            user_id=current_user.id,
            filename=file.filename or "uploaded_file",
            original_filename=file.filename,
            document_type=document_type,
            file_size=len(file_content),
            mime_type=file.content_type,
            file_content=file_content,
            ai_generated="false"
        )

        db.add(document)
        await db.commit()
        await db.refresh(document)

        logger.info("Document uploaded",
                    user_id=current_user.id,
                    document_id=document.id,
                    filename=file.filename)

        return DocumentResponse.from_orm(document)

    except HTTPException:
        # Re-raise validation errors (e.g. 413) instead of masking them as 500s
        raise
    except Exception as e:
        logger.error("Document upload failed", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to upload document"
        )


@router.get("/", response_model=List[DocumentResponse])
async def get_documents(
    document_type: Optional[DocumentType] = None,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    query = select(Document).where(Document.user_id == current_user.id)

    if document_type:
        query = query.where(Document.document_type == document_type)

    query = query.order_by(Document.created_at.desc())

    result = await db.execute(query)
    documents = result.scalars().all()

    return [DocumentResponse.from_orm(doc) for doc in documents]


@router.get("/{document_id}", response_model=DocumentResponse)
async def get_document(
    document_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    document = await db.execute(
        select(Document).where(
            Document.id == document_id,
            Document.user_id == current_user.id
        )
    )
    document = document.scalar_one_or_none()

    if not document:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Document not found"
        )

    return DocumentResponse.from_orm(document)


@router.delete("/{document_id}")
async def delete_document(
    document_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    document = await db.execute(
        select(Document).where(
            Document.id == document_id,
            Document.user_id == current_user.id
        )
    )
    document = document.scalar_one_or_none()

    if not document:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Document not found"
        )

    await db.delete(document)
    await db.commit()

    logger.info("Document deleted",
                user_id=current_user.id,
                document_id=document_id)

    return {"message": "Document deleted successfully"}


@router.post("/generate-cover-letter")
async def generate_cover_letter(
    job_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    # Placeholder for AI cover letter generation
    # In full implementation, this would use Claude/OpenAI APIs

    cover_letter_content = f"""
Dear Hiring Manager,

I am writing to express my interest in the position at your company.
[AI-generated content would be here based on job requirements and user profile]

Best regards,
{current_user.full_name}
"""

    document = Document(
        user_id=current_user.id,
        filename=f"cover_letter_job_{job_id}.txt",
        document_type=DocumentType.COVER_LETTER,
        text_content=cover_letter_content,
        ai_generated="true",
        ai_model_used="claude-3",
        generation_prompt=f"Generate cover letter for job ID {job_id}"
    )

    db.add(document)
    await db.commit()
    await db.refresh(document)

    return {
        "message": "Cover letter generated successfully",
        "document_id": document.id,
        "content": cover_letter_content
    }
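A hedged sketch of calling the upload endpoint with a multipart body, assuming a local backend, the `/api/documents` prefix from `main.py`, and a valid JWT. The `document_type` value must match the `DocumentType` enum values defined in the models (not shown here), so `"other"` is an assumption.

```python
import httpx

token = "<paste JWT here>"  # obtained from /api/auth/login
files = {"file": ("resume.pdf", open("resume.pdf", "rb"), "application/pdf")}

resp = httpx.post(
    "http://localhost:8000/api/documents/upload",
    params={"document_type": "other"},  # assumed enum value
    files=files,
    headers={"Authorization": f"Bearer {token}"},
)
print(resp.status_code, resp.json())
```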
src/backend/api/job_applications.py (new file, +405 lines)

"""
Job Applications API that matches the actual database schema and includes AI features
"""
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete
from pydantic import BaseModel, HttpUrl
from typing import List, Optional
import structlog

from ..core.database import get_db
from ..models.user import User
from ..models.job_application import JobApplication, PriorityLevel, ApplicationStatus
from ..models.job_document import JobDocument, DocumentTypeEnum
from ..services.ai_service import ai_service
from .auth import get_current_user

logger = structlog.get_logger()
router = APIRouter()


class ApplicationCreate(BaseModel):
    name: str
    company_name: str
    role_title: str
    job_description: str
    job_url: Optional[str] = None
    location: Optional[str] = None
    priority_level: PriorityLevel = PriorityLevel.MEDIUM


class ApplicationUpdate(BaseModel):
    name: Optional[str] = None
    status: Optional[ApplicationStatus] = None
    priority_level: Optional[PriorityLevel] = None
    job_url: Optional[str] = None
    location: Optional[str] = None


class ApplicationResponse(BaseModel):
    id: str
    name: str
    company_name: str
    role_title: str
    job_url: Optional[str]
    location: Optional[str]
    priority_level: PriorityLevel
    status: ApplicationStatus
    research_completed: bool
    resume_optimized: bool
    cover_letter_generated: bool
    created_at: str

    @classmethod
    def from_application(cls, app: JobApplication):
        return cls(
            id=str(app.id),
            name=app.name,
            company_name=app.company_name,
            role_title=app.role_title,
            job_url=app.job_url,
            location=app.location,
            priority_level=app.priority_level,
            status=app.status,
            research_completed=app.research_completed,
            resume_optimized=app.resume_optimized,
            cover_letter_generated=app.cover_letter_generated,
            created_at=app.created_at.isoformat()
        )


class DocumentResponse(BaseModel):
    id: str
    document_type: DocumentTypeEnum
    content: str
    created_at: str

    @classmethod
    def from_document(cls, doc: JobDocument):
        return cls(
            id=str(doc.id),
            document_type=doc.document_type,
            content=doc.content,
            created_at=doc.created_at.isoformat()
        )


@router.post("/", response_model=ApplicationResponse)
async def create_application(
    application_data: ApplicationCreate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Create a new job application"""
    application = JobApplication(
        user_id=current_user.id,
        name=application_data.name,
        company_name=application_data.company_name,
        role_title=application_data.role_title,
        job_description=application_data.job_description,
        job_url=application_data.job_url,
        location=application_data.location,
        priority_level=application_data.priority_level
    )

    db.add(application)
    await db.commit()
    await db.refresh(application)

    logger.info("Application created",
                user_id=str(current_user.id),
                application_id=str(application.id),
                company=application_data.company_name)

    return ApplicationResponse.from_application(application)


@router.get("/", response_model=List[ApplicationResponse])
async def get_applications(
    status: Optional[ApplicationStatus] = None,
    priority: Optional[PriorityLevel] = None,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Get all applications for the current user"""
    query = select(JobApplication).where(JobApplication.user_id == current_user.id)

    if status:
        query = query.where(JobApplication.status == status)
    if priority:
        query = query.where(JobApplication.priority_level == priority)

    query = query.order_by(JobApplication.created_at.desc())

    result = await db.execute(query)
    applications = result.scalars().all()

    return [ApplicationResponse.from_application(app) for app in applications]


@router.get("/{application_id}", response_model=ApplicationResponse)
async def get_application(
    application_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Get a specific application"""
    result = await db.execute(
        select(JobApplication).where(
            JobApplication.id == application_id,
            JobApplication.user_id == current_user.id
        )
    )
    application = result.scalar_one_or_none()

    if not application:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Application not found"
        )

    return ApplicationResponse.from_application(application)


@router.put("/{application_id}", response_model=ApplicationResponse)
async def update_application(
    application_id: str,
    update_data: ApplicationUpdate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Update an application"""
    result = await db.execute(
        select(JobApplication).where(
            JobApplication.id == application_id,
            JobApplication.user_id == current_user.id
        )
    )
    application = result.scalar_one_or_none()

    if not application:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Application not found"
        )

    update_dict = update_data.dict(exclude_unset=True)
    for field, value in update_dict.items():
        setattr(application, field, value)

    await db.commit()
    await db.refresh(application)

    return ApplicationResponse.from_application(application)


@router.post("/{application_id}/generate-cover-letter", response_model=DocumentResponse)
async def generate_cover_letter(
    application_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Generate AI cover letter for the application"""
    # Get the application
    result = await db.execute(
        select(JobApplication).where(
            JobApplication.id == application_id,
            JobApplication.user_id == current_user.id
        )
    )
    application = result.scalar_one_or_none()

    if not application:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Application not found"
        )

    # Check if cover letter already exists
    existing_doc = await db.execute(
        select(JobDocument).where(
            JobDocument.application_id == application_id,
            JobDocument.document_type == DocumentTypeEnum.COVER_LETTER
        )
    )
    if existing_doc.scalar_one_or_none():
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Cover letter already exists for this application"
        )

    # Generate cover letter using AI
    try:
        ai_result = await ai_service.generate_cover_letter(
            job_description=application.job_description,
            company_name=application.company_name,
            role_title=application.role_title,
            user_name=current_user.full_name
        )

        # Create document
        document = JobDocument(
            application_id=application.id,
            document_type=DocumentTypeEnum.COVER_LETTER,
            content=ai_result["content"]
        )

        db.add(document)

        # Update application flags
        application.cover_letter_generated = True
        if application.status == ApplicationStatus.DRAFT:
            application.status = ApplicationStatus.COVER_LETTER_READY

        await db.commit()
        await db.refresh(document)

        logger.info("Cover letter generated",
                    user_id=str(current_user.id),
                    application_id=application_id,
                    model_used=ai_result["model_used"])

        return DocumentResponse.from_document(document)

    except Exception as e:
        logger.error("Cover letter generation failed",
                     error=str(e),
                     application_id=application_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to generate cover letter"
        )


@router.post("/{application_id}/optimize-resume")
async def optimize_resume(
    application_id: str,
    resume_content: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Generate optimized resume for the application"""
    # Get the application
    result = await db.execute(
        select(JobApplication).where(
            JobApplication.id == application_id,
            JobApplication.user_id == current_user.id
        )
    )
    application = result.scalar_one_or_none()

    if not application:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Application not found"
        )

    # Generate optimized resume using AI
    try:
        ai_result = await ai_service.generate_resume_optimization(
            current_resume=resume_content,
            job_description=application.job_description,
            role_title=application.role_title
        )

        # Check if optimized resume already exists
        existing_doc = await db.execute(
            select(JobDocument).where(
                JobDocument.application_id == application_id,
                JobDocument.document_type == DocumentTypeEnum.OPTIMIZED_RESUME
            )
        )
        existing = existing_doc.scalar_one_or_none()

        if existing:
            # Update existing document
            existing.content = ai_result["content"]
            document = existing
        else:
            # Create new document
            document = JobDocument(
                application_id=application.id,
                document_type=DocumentTypeEnum.OPTIMIZED_RESUME,
                content=ai_result["content"]
            )
        db.add(document)

        # Update application flags
        application.resume_optimized = True
        if application.status == ApplicationStatus.DRAFT:
            application.status = ApplicationStatus.RESUME_READY

        await db.commit()
        await db.refresh(document)

        logger.info("Resume optimized",
                    user_id=str(current_user.id),
                    application_id=application_id,
                    model_used=ai_result["model_used"])

        return DocumentResponse.from_document(document)

    except Exception as e:
        logger.error("Resume optimization failed",
                     error=str(e),
                     application_id=application_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to optimize resume"
        )


@router.get("/{application_id}/documents", response_model=List[DocumentResponse])
async def get_application_documents(
    application_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Get all documents for an application"""
    # Verify application belongs to user
    app_result = await db.execute(
        select(JobApplication).where(
            JobApplication.id == application_id,
            JobApplication.user_id == current_user.id
        )
    )
    if not app_result.scalar_one_or_none():
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Application not found"
        )

    # Get documents
    result = await db.execute(
        select(JobDocument)
        .where(JobDocument.application_id == application_id)
        .order_by(JobDocument.created_at.desc())
    )
    documents = result.scalars().all()

    return [DocumentResponse.from_document(doc) for doc in documents]


@router.delete("/{application_id}")
async def delete_application(
    application_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Delete an application and all its documents"""
    result = await db.execute(
        select(JobApplication).where(
            JobApplication.id == application_id,
            JobApplication.user_id == current_user.id
        )
    )
    application = result.scalar_one_or_none()

    if not application:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Application not found"
        )

    # Delete documents first (CASCADE should handle this, but being explicit)
    await db.execute(
        delete(JobDocument).where(JobDocument.application_id == application_id)
    )

    await db.delete(application)
    await db.commit()

    logger.info("Application deleted",
                user_id=str(current_user.id),
                application_id=application_id)

    return {"message": "Application deleted successfully"}
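A sketch of the intended workflow (create an application, then trigger cover-letter generation), assuming this router ends up mounted at `/api/applications` and that `token` holds a valid JWT; the exact prefix depends on how `main.py` wires the routers, so treat it as an assumption.

```python
import httpx

token = "<paste JWT here>"
headers = {"Authorization": f"Bearer {token}"}
base = "http://localhost:8000/api/applications"  # assumed mount point

# Create a draft application.
app_resp = httpx.post(base + "/", headers=headers, json={
    "name": "Backend role at Acme",
    "company_name": "Acme",
    "role_title": "Backend Engineer",
    "job_description": "Build FastAPI services with PostgreSQL...",
})
app_id = app_resp.json()["id"]

# Ask the AI service to draft a cover letter for it (may take a while).
letter = httpx.post(f"{base}/{app_id}/generate-cover-letter", headers=headers, timeout=60.0)
print(letter.json()["content"])
```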
src/backend/api/jobs.py (new file, +143 lines)

from fastapi import APIRouter, Depends, HTTPException, status, Query
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func
from pydantic import BaseModel
from typing import List, Optional
from datetime import datetime

from ..core.database import get_db
from ..models.user import User
from ..models.job import Job
from .auth import get_current_user

router = APIRouter()


class JobCreate(BaseModel):
    title: str
    company: str
    location: Optional[str] = None
    salary_min: Optional[int] = None
    salary_max: Optional[int] = None
    remote_option: bool = False
    description: str
    requirements: Optional[str] = None
    benefits: Optional[str] = None
    source_url: Optional[str] = None
    source_platform: Optional[str] = None
    posted_date: Optional[datetime] = None


class JobResponse(BaseModel):
    id: int
    title: str
    company: str
    location: Optional[str]
    salary_min: Optional[int]
    salary_max: Optional[int]
    remote_option: bool
    description: str
    requirements: Optional[str]
    benefits: Optional[str]
    source_url: Optional[str]
    posted_date: Optional[datetime]
    created_at: datetime

    class Config:
        from_attributes = True


class JobSearchResponse(BaseModel):
    id: int
    title: str
    company: str
    location: Optional[str]
    salary_min: Optional[int]
    salary_max: Optional[int]
    remote_option: bool
    description: str
    match_score: Optional[float]
    posted_date: Optional[datetime]

    class Config:
        from_attributes = True


@router.post("/", response_model=JobResponse)
async def create_job(
    job_data: JobCreate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    job = Job(**job_data.dict())
    db.add(job)
    await db.commit()
    await db.refresh(job)

    return JobResponse.from_orm(job)


@router.get("/", response_model=List[JobSearchResponse])
async def search_jobs(
    q: Optional[str] = Query(None, description="Search query"),
    location: Optional[str] = Query(None, description="Location filter"),
    remote: Optional[bool] = Query(None, description="Remote work filter"),
    salary_min: Optional[int] = Query(None, description="Minimum salary"),
    company: Optional[str] = Query(None, description="Company filter"),
    limit: int = Query(20, ge=1, le=100),
    offset: int = Query(0, ge=0),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    query = select(Job).where(Job.is_active == True)

    if q:
        search_filter = func.lower(Job.title).contains(q.lower()) | \
                        func.lower(Job.description).contains(q.lower()) | \
                        func.lower(Job.company).contains(q.lower())
        query = query.where(search_filter)

    if location:
        query = query.where(func.lower(Job.location).contains(location.lower()))

    if remote is not None:
        query = query.where(Job.remote_option == remote)

    if salary_min:
        query = query.where(Job.salary_min >= salary_min)

    if company:
        query = query.where(func.lower(Job.company).contains(company.lower()))

    query = query.order_by(Job.posted_date.desc().nullslast(), Job.created_at.desc())
    query = query.offset(offset).limit(limit)

    result = await db.execute(query)
    jobs = result.scalars().all()

    return [JobSearchResponse.from_orm(job) for job in jobs]


@router.get("/{job_id}", response_model=JobResponse)
async def get_job(
    job_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    job = await db.get(Job, job_id)
    if not job or not job.is_active:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Job not found"
        )

    return JobResponse.from_orm(job)


@router.get("/recommendations/")
async def get_job_recommendations(
    limit: int = Query(10, ge=1, le=50),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    # For now, return recent jobs sorted by created date
    # In a full implementation, this would use AI matching based on user profile
    query = select(Job).where(Job.is_active == True).order_by(Job.created_at.desc()).limit(limit)

    result = await db.execute(query)
    jobs = result.scalars().all()

    return [JobSearchResponse.from_orm(job) for job in jobs]
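A hedged example of the search endpoint's query parameters, assuming the jobs router is mounted at `/api/jobs` (as in `main.py`) and `token` holds a valid JWT.

```python
import httpx

token = "<paste JWT here>"
resp = httpx.get(
    "http://localhost:8000/api/jobs/",
    params={"q": "python", "remote": True, "salary_min": 90000, "limit": 10},
    headers={"Authorization": f"Bearer {token}"},
)
for job in resp.json():
    print(job["title"], "at", job["company"])
```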
src/backend/core/__init__.py (new file, empty)

src/backend/core/config.py (new file, +30 lines)

from pydantic_settings import BaseSettings
from pydantic import Field
import os


class Settings(BaseSettings):
    database_url: str = Field(
        default="postgresql+asyncpg://jobforge_user:jobforge_password@localhost:5432/jobforge_mvp",
        env="DATABASE_URL"
    )

    claude_api_key: str = Field(env="CLAUDE_API_KEY")
    openai_api_key: str = Field(env="OPENAI_API_KEY")
    jwt_secret_key: str = Field(env="JWT_SECRET_KEY")

    debug: bool = Field(default=False, env="DEBUG")
    log_level: str = Field(default="INFO", env="LOG_LEVEL")

    jwt_algorithm: str = "HS256"
    jwt_expire_minutes: int = 60 * 24 * 7  # 7 days

    cors_origins: list[str] = [
        "http://localhost:8501",
        "http://frontend:8501"
    ]

    class Config:
        env_file = ".env"
        case_sensitive = False


settings = Settings()
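A minimal sketch of how these settings resolve at runtime: values come from the environment (or a `.env` file) and fall back to the declared defaults. The import path `src.backend.core.config` is an assumption based on the repository layout; the module instantiates `Settings()` on import, so the required keys must be present first.

```python
import os

# Provide the required secrets before importing the module (values are placeholders).
os.environ.setdefault("CLAUDE_API_KEY", "test-key")
os.environ.setdefault("OPENAI_API_KEY", "test-key")
os.environ.setdefault("JWT_SECRET_KEY", "change-me")
os.environ.setdefault("DEBUG", "true")

from src.backend.core.config import Settings  # import path is an assumption

print(Settings().debug)         # True, parsed from the DEBUG env var
print(Settings().database_url)  # falls back to the bundled default DSN
```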
src/backend/core/database.py (new file, +49 lines)

from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.orm import DeclarativeBase
from sqlalchemy import text
import structlog

from .config import settings

logger = structlog.get_logger()


class Base(DeclarativeBase):
    pass


engine = create_async_engine(
    settings.database_url,
    echo=settings.debug,
    pool_pre_ping=True,
    pool_recycle=300
)

AsyncSessionLocal = async_sessionmaker(
    engine,
    class_=AsyncSession,
    expire_on_commit=False
)


async def get_db():
    async with AsyncSessionLocal() as session:
        try:
            yield session
        except Exception:
            await session.rollback()
            raise
        finally:
            await session.close()


async def init_db():
    try:
        async with engine.begin() as conn:
            await conn.execute(text("CREATE EXTENSION IF NOT EXISTS vector"))
            logger.info("Database extensions initialized")

        from ..models import user, application, job, document
        async with engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)
            logger.info("Database tables created")

    except Exception as e:
        logger.error("Database initialization failed", error=str(e))
        raise
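For a quick smoke test outside FastAPI, the session factory can be used directly. This assumes the environment variables required by `core.config` are set and the Postgres instance behind the DSN is reachable; the import path is an assumption.

```python
import asyncio
from sqlalchemy import text

from src.backend.core.database import AsyncSessionLocal  # import path is an assumption


async def main():
    async with AsyncSessionLocal() as session:
        result = await session.execute(text("SELECT 1"))
        print(result.scalar_one())  # prints 1 when the database is up


asyncio.run(main())
```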
src/backend/main.py (new file, +64 lines)

from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
import structlog
from contextlib import asynccontextmanager

from .core.config import settings
from .core.database import init_db
from .api import auth, applications, jobs, documents, ai_documents

logger = structlog.get_logger()


@asynccontextmanager
async def lifespan(app: FastAPI):
    logger.info("Starting Job Forge backend...")
    await init_db()
    logger.info("Database initialized")
    yield
    logger.info("Shutting down Job Forge backend...")


app = FastAPI(
    title="Job Forge API",
    description="AI-Powered Job Application Assistant",
    version="1.0.0",
    lifespan=lifespan
)

app.add_middleware(
    CORSMiddleware,
    allow_origins=["http://localhost:8501", "http://frontend:8501"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.get("/health")
async def health_check():
    return {"status": "healthy", "service": "job-forge-backend"}


@app.get("/")
async def root():
    return {"message": "Job Forge API", "version": "1.0.0"}


app.include_router(auth.router, prefix="/api/auth", tags=["Authentication"])
app.include_router(applications.router, prefix="/api/applications", tags=["Applications"])
app.include_router(jobs.router, prefix="/api/jobs", tags=["Jobs"])
app.include_router(documents.router, prefix="/api/documents", tags=["Documents"])
app.include_router(ai_documents.router, prefix="/api/ai", tags=["AI Document Generation"])


@app.exception_handler(HTTPException)
async def http_exception_handler(request, exc):
    logger.error("HTTP exception", status_code=exc.status_code, detail=exc.detail)
    return JSONResponse(
        status_code=exc.status_code,
        content={"detail": exc.detail}
    )


@app.exception_handler(Exception)
async def general_exception_handler(request, exc):
    logger.error("Unhandled exception", error=str(exc))
    return JSONResponse(
        status_code=500,
        content={"detail": "Internal server error"}
    )
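One way to run the app locally during development; the module path `src.backend.main:app` is an assumption based on the layout shown in this diff, and the required environment variables from `core.config` must be set first.

```python
import uvicorn

if __name__ == "__main__":
    # Serve the FastAPI app with auto-reload for local development.
    uvicorn.run("src.backend.main:app", host="0.0.0.0", port=8000, reload=True)
```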
src/backend/services/__init__.py (new file, empty)

src/backend/services/ai_service.py (new file, +222 lines)

"""
AI Service for Job Forge - Handles document generation and AI processing
"""
import structlog
from typing import Dict, Optional
import anthropic
import openai
from ..core.config import settings

logger = structlog.get_logger()


class AIService:
    def __init__(self):
        self.claude_client = None
        self.openai_client = None

        # Initialize Claude client if API key is available
        if settings.claude_api_key:
            self.claude_client = anthropic.Anthropic(api_key=settings.claude_api_key)

        # Initialize OpenAI client if API key is available
        if settings.openai_api_key:
            self.openai_client = openai.AsyncOpenAI(api_key=settings.openai_api_key)

    async def generate_cover_letter(
        self,
        job_description: str,
        company_name: str,
        role_title: str,
        user_name: str,
        user_resume: Optional[str] = None
    ) -> Dict[str, str]:
        """
        Generate a personalized cover letter using AI
        """
        try:
            # Construct the prompt
            prompt = f"""
You are a professional career coach helping someone write a compelling cover letter.

JOB DETAILS:
- Company: {company_name}
- Role: {role_title}
- Job Description: {job_description}

USER INFORMATION:
- Name: {user_name}
{f"- Resume/Background: {user_resume[:1000]}..." if user_resume else ""}

TASK:
Write a professional, personalized cover letter that:
1. Shows genuine interest in the specific role and company
2. Highlights relevant skills from the job description
3. Demonstrates understanding of the company's needs
4. Uses a professional but engaging tone
5. Is 3-4 paragraphs long
6. Includes a strong opening and closing

Format the response as a complete cover letter without any meta-commentary.
"""

            # Try Claude first, fallback to OpenAI
            if self.claude_client:
                logger.info("Generating cover letter with Claude")
                response = self.claude_client.messages.create(
                    model="claude-3-haiku-20240307",
                    max_tokens=1000,
                    messages=[
                        {"role": "user", "content": prompt}
                    ]
                )
                content = response.content[0].text
                model_used = "claude-3-haiku"

            elif self.openai_client:
                logger.info("Generating cover letter with OpenAI")
                response = await self.openai_client.chat.completions.create(
                    model="gpt-3.5-turbo",
                    messages=[
                        {"role": "system", "content": "You are a professional career coach helping write cover letters."},
                        {"role": "user", "content": prompt}
                    ],
                    max_tokens=1000,
                    temperature=0.7
                )
                content = response.choices[0].message.content
                model_used = "gpt-3.5-turbo"

            else:
                # Fallback to template-based generation
                logger.warning("No AI API keys available, using template")
                content = self._generate_template_cover_letter(
                    company_name, role_title, user_name, job_description
                )
                model_used = "template"

            return {
                "content": content,
                "model_used": model_used,
                "prompt": prompt[:500] + "..." if len(prompt) > 500 else prompt
            }

        except Exception as e:
            logger.error("AI cover letter generation failed", error=str(e))
            # Fallback to template
            content = self._generate_template_cover_letter(
                company_name, role_title, user_name, job_description
            )
            return {
                "content": content,
                "model_used": "template-fallback",
                "prompt": "Template fallback due to AI service error"
            }

    async def generate_resume_optimization(
        self,
        current_resume: str,
        job_description: str,
        role_title: str
    ) -> Dict[str, str]:
        """
        Optimize resume for specific job requirements
        """
        try:
            prompt = f"""
You are an expert resume writer helping optimize a resume for a specific job.

CURRENT RESUME:
{current_resume}

TARGET JOB:
- Role: {role_title}
- Job Description: {job_description}

TASK:
Optimize this resume by:
1. Highlighting relevant skills mentioned in the job description
2. Reordering sections to emphasize most relevant experience
3. Using keywords from the job posting
4. Maintaining truthfulness - only reorganize/reword existing content
5. Keeping the same general structure and format

Return the optimized resume without meta-commentary.
"""

            if self.claude_client:
                response = self.claude_client.messages.create(
                    model="claude-3-haiku-20240307",
                    max_tokens=2000,
                    messages=[
                        {"role": "user", "content": prompt}
                    ]
                )
                content = response.content[0].text
                model_used = "claude-3-haiku"

            elif self.openai_client:
                response = await self.openai_client.chat.completions.create(
                    model="gpt-3.5-turbo",
                    messages=[
                        {"role": "system", "content": "You are an expert resume writer."},
                        {"role": "user", "content": prompt}
                    ],
                    max_tokens=2000,
                    temperature=0.5
                )
                content = response.choices[0].message.content
                model_used = "gpt-3.5-turbo"

            else:
                content = f"Resume optimization for {role_title}\n\n{current_resume}\n\n[AI optimization would be applied here with API keys configured]"
                model_used = "template"

            return {
                "content": content,
                "model_used": model_used,
                "prompt": prompt[:500] + "..." if len(prompt) > 500 else prompt
            }

        except Exception as e:
            logger.error("Resume optimization failed", error=str(e))
            return {
                "content": f"Optimized Resume for {role_title}\n\n{current_resume}",
                "model_used": "template-fallback",
                "prompt": "Template fallback due to AI service error"
            }

    def _generate_template_cover_letter(
        self,
        company_name: str,
        role_title: str,
        user_name: str,
        job_description: str
    ) -> str:
        """
        Generate a basic template cover letter when AI services are unavailable
        """
        # Extract a few keywords from job description
        keywords = []
        common_skills = ["python", "javascript", "react", "sql", "aws", "docker", "git", "api", "database"]
        for skill in common_skills:
            if skill.lower() in job_description.lower():
                keywords.append(skill.title())

        skills_text = f" with expertise in {', '.join(keywords[:3])}" if keywords else ""

        return f"""Dear Hiring Manager,

I am writing to express my strong interest in the {role_title} position at {company_name}. Based on the job description, I am excited about the opportunity to contribute to your team{skills_text}.

Your requirements align well with my background and experience. I am particularly drawn to this role because it represents an excellent opportunity to apply my skills in a dynamic environment while contributing to {company_name}'s continued success.

I would welcome the opportunity to discuss how my experience and enthusiasm can benefit your team. Thank you for considering my application, and I look forward to hearing from you.

Best regards,
{user_name}

---
[Generated by Job Forge AI Assistant - Configure API keys for enhanced personalization]"""


# Create a singleton instance
ai_service = AIService()
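The service can also be exercised outside FastAPI. This sketch assumes the environment variables required by `core.config` are set (the import path is an assumption); if no working API keys are configured, the call falls back to the built-in template, so it still returns content without network access.

```python
import asyncio

from src.backend.services.ai_service import AIService  # import path is an assumption


async def main():
    service = AIService()
    result = await service.generate_cover_letter(
        job_description="Python, FastAPI, PostgreSQL",
        company_name="Acme",
        role_title="Backend Engineer",
        user_name="Demo User",
    )
    print(result["model_used"])      # e.g. "claude-3-haiku" or "template"
    print(result["content"][:200])   # first part of the generated letter


asyncio.run(main())
```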
0
src/frontend/__init__.py
Normal file
0
src/frontend/__init__.py
Normal file
235
src/frontend/callbacks.py
Normal file
235
src/frontend/callbacks.py
Normal file
@@ -0,0 +1,235 @@
|
|||||||
|
from dash import Input, Output, State, callback, clientside_callback
|
||||||
|
import dash_mantine_components as dmc
|
||||||
|
import httpx
|
||||||
|
import structlog
|
||||||
|
|
||||||
|
from pages.home import create_home_page
|
||||||
|
from pages.auth import create_login_page
|
||||||
|
|
||||||
|
logger = structlog.get_logger()
|
||||||
|
|
||||||
|
def register_callbacks(app, config):
|
||||||
|
@app.callback(
|
||||||
|
Output("page-content", "children"),
|
||||||
|
Output("header-actions", "children"),
|
||||||
|
Input("url", "pathname"),
|
||||||
|
State("auth-store", "data")
|
||||||
|
)
|
||||||
|
def display_page(pathname, auth_data):
|
||||||
|
# Check if user is authenticated
|
||||||
|
is_authenticated = auth_data and auth_data.get("token")
|
||||||
|
|
||||||
|
if not is_authenticated:
|
||||||
|
# Show login page for unauthenticated users
|
||||||
|
if pathname == "/login" or pathname is None or pathname == "/":
|
||||||
|
return create_login_page(), []
|
||||||
|
else:
|
||||||
|
return create_login_page(), []
|
||||||
|
|
||||||
|
# Authenticated user navigation
|
||||||
|
header_actions = [
|
||||||
|
dmc.Button(
|
||||||
|
"Logout",
|
||||||
|
id="logout-btn",
|
||||||
|
variant="outline",
|
||||||
|
color="red",
|
||||||
|
leftIcon="tabler:logout"
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
# Route to different pages
|
||||||
|
if pathname == "/" or pathname is None:
|
||||||
|
return create_home_page(), header_actions
|
||||||
|
elif pathname == "/jobs":
|
||||||
|
return create_jobs_page(), header_actions
|
||||||
|
elif pathname == "/applications":
|
||||||
|
return create_applications_page(), header_actions
|
||||||
|
elif pathname == "/documents":
|
||||||
|
return create_documents_page(), header_actions
|
||||||
|
elif pathname == "/profile":
|
||||||
|
return create_profile_page(), header_actions
|
||||||
|
else:
|
||||||
|
return create_home_page(), header_actions
|
||||||
|
|
||||||
|
@app.callback(
|
||||||
|
Output("auth-store", "data"),
|
||||||
|
Output("auth-alerts", "children"),
|
||||||
|
Input("login-submit", "n_clicks"),
|
||||||
|
State("login-email", "value"),
|
||||||
|
State("login-password", "value"),
|
||||||
|
prevent_initial_call=True
|
||||||
|
)
|
||||||
|
def handle_login(n_clicks, email, password):
|
||||||
|
if not n_clicks or not email or not password:
|
||||||
|
return None, []
|
||||||
|
|
||||||
|
try:
|
||||||
|
response = httpx.post(
|
||||||
|
f"{config.auth_url}/login",
|
||||||
|
json={"email": email, "password": password},
|
||||||
|
timeout=10.0
|
||||||
|
)
|
||||||
|
|
||||||
|
if response.status_code == 200:
|
||||||
|
token_data = response.json()
|
||||||
|
auth_data = {
|
||||||
|
"token": token_data["access_token"],
|
||||||
|
"email": email
|
||||||
|
}
|
||||||
|
|
||||||
|
success_alert = dmc.Alert(
|
||||||
|
"Login successful! Redirecting...",
|
||||||
|
title="Success",
|
||||||
|
color="green",
|
||||||
|
duration=3000
|
||||||
|
)
|
||||||
|
|
||||||
|
return auth_data, success_alert
|
||||||
|
else:
|
||||||
|
error_alert = dmc.Alert(
|
||||||
|
"Invalid email or password",
|
||||||
|
title="Login Failed",
|
||||||
|
color="red"
|
||||||
|
)
|
||||||
|
return None, error_alert
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error("Login error", error=str(e))
|
||||||
|
error_alert = dmc.Alert(
|
||||||
|
"Connection error. Please try again.",
|
||||||
|
title="Error",
|
||||||
|
color="red"
|
||||||
|
)
|
||||||
|
return None, error_alert
|
||||||
|
|
||||||
|
@app.callback(
|
||||||
|
Output("auth-store", "data", allow_duplicate=True),
|
||||||
|
Output("auth-alerts", "children", allow_duplicate=True),
|
||||||
|
Input("register-submit", "n_clicks"),
|
||||||
|
State("register-email", "value"),
|
||||||
|
State("register-password", "value"),
|
||||||
|
State("register-password-confirm", "value"),
|
||||||
|
State("register-first-name", "value"),
|
||||||
|
State("register-last-name", "value"),
|
||||||
|
State("register-phone", "value"),
|
||||||
|
prevent_initial_call=True
|
||||||
|
)
|
||||||
|
def handle_register(n_clicks, email, password, password_confirm, first_name, last_name, phone):
|
||||||
|
if not n_clicks:
|
||||||
|
return None, []
|
||||||
|
|
||||||
|
# Validation
|
||||||
|
if not all([email, password, first_name, last_name]):
|
||||||
|
error_alert = dmc.Alert(
|
||||||
|
"All required fields must be filled",
|
||||||
|
title="Validation Error",
|
||||||
|
color="red"
|
||||||
|
)
|
||||||
|
return None, error_alert
|
||||||
|
|
||||||
|
if password != password_confirm:
|
||||||
|
error_alert = dmc.Alert(
|
||||||
|
"Passwords do not match",
|
||||||
|
title="Validation Error",
|
||||||
|
color="red"
|
||||||
|
)
|
||||||
|
return None, error_alert
|
||||||
|
|
||||||
|
try:
|
||||||
|
user_data = {
|
||||||
|
"email": email,
|
||||||
|
"password": password,
|
||||||
|
"first_name": first_name,
|
||||||
|
"last_name": last_name
|
||||||
|
}
|
||||||
|
if phone:
|
||||||
|
user_data["phone"] = phone
|
||||||
|
|
||||||
|
response = httpx.post(
|
||||||
|
f"{config.auth_url}/register",
|
||||||
|
json=user_data,
|
||||||
|
timeout=10.0
|
||||||
|
)
|
||||||
|
|
||||||
|
if response.status_code == 200:
|
||||||
|
# Auto-login after successful registration
|
||||||
|
login_response = httpx.post(
|
||||||
|
f"{config.auth_url}/login",
|
||||||
|
json={"email": email, "password": password},
|
||||||
|
timeout=10.0
|
||||||
|
)
|
||||||
|
|
||||||
|
if login_response.status_code == 200:
|
||||||
|
token_data = login_response.json()
|
||||||
|
auth_data = {
|
||||||
|
"token": token_data["access_token"],
|
||||||
|
"email": email
|
||||||
|
}
|
||||||
|
|
||||||
|
success_alert = dmc.Alert(
|
||||||
|
"Registration successful! Welcome to Job Forge!",
|
||||||
|
title="Success",
|
||||||
|
color="green",
|
||||||
|
duration=3000
|
||||||
|
)
|
||||||
|
|
||||||
|
return auth_data, success_alert
|
||||||
|
|
||||||
|
error_alert = dmc.Alert(
|
||||||
|
"Registration failed. Email may already be in use.",
|
||||||
|
title="Registration Failed",
|
||||||
|
color="red"
|
||||||
|
)
|
||||||
|
return None, error_alert
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error("Registration error", error=str(e))
|
||||||
|
error_alert = dmc.Alert(
|
||||||
|
"Connection error. Please try again.",
|
||||||
|
title="Error",
|
||||||
|
color="red"
|
||||||
|
)
|
||||||
|
return None, error_alert
|
||||||
|
|
||||||
|
@app.callback(
|
||||||
|
Output("auth-store", "clear_data"),
|
||||||
|
Input("logout-btn", "n_clicks"),
|
||||||
|
prevent_initial_call=True
|
||||||
|
)
|
||||||
|
def handle_logout(n_clicks):
|
||||||
|
if n_clicks:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Placeholder functions for other pages
|
||||||
|
def create_jobs_page():
|
||||||
|
return dmc.Container(
|
||||||
|
children=[
|
||||||
|
dmc.Title("Job Search", mb="lg"),
|
||||||
|
dmc.Text("Job search functionality coming soon...")
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
def create_applications_page():
|
||||||
|
return dmc.Container(
|
||||||
|
children=[
|
||||||
|
dmc.Title("My Applications", mb="lg"),
|
||||||
|
dmc.Text("Application tracking functionality coming soon...")
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
def create_documents_page():
|
||||||
|
return dmc.Container(
|
||||||
|
children=[
|
||||||
|
dmc.Title("Documents", mb="lg"),
|
||||||
|
dmc.Text("Document management functionality coming soon...")
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
def create_profile_page():
|
||||||
|
return dmc.Container(
|
||||||
|
children=[
|
||||||
|
dmc.Title("Profile", mb="lg"),
|
||||||
|
dmc.Text("Profile management functionality coming soon...")
|
||||||
|
]
|
||||||
|
)
|
||||||
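For reference, the auth-store payload written by these callbacks is what later data callbacks would read back. A minimal sketch of attaching that token to a backend request follows; the helper name is illustrative and not part of this change:

def fetch_applications(auth_data, config, timeout=10.0):
    # Hypothetical helper: reads the JWT saved by handle_login/handle_register
    # from the auth-store payload and forwards it as a Bearer token.
    if not auth_data or "token" not in auth_data:
        return []
    response = httpx.get(
        config.applications_url,  # e.g. http://localhost:8000/api/applications
        headers={"Authorization": f"Bearer {auth_data['token']}"},
        timeout=timeout,
    )
    response.raise_for_status()
    return response.json()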
27  src/frontend/config.py  Normal file
@@ -0,0 +1,27 @@
import os
from typing import Optional


class Config:
    def __init__(self):
        self.BACKEND_URL = os.getenv("BACKEND_URL", "http://localhost:8000")
        self.DEBUG = os.getenv("DEBUG", "false").lower() == "true"

    @property
    def api_base_url(self) -> str:
        return f"{self.BACKEND_URL}/api"

    @property
    def auth_url(self) -> str:
        return f"{self.api_base_url}/auth"

    @property
    def applications_url(self) -> str:
        return f"{self.api_base_url}/applications"

    @property
    def jobs_url(self) -> str:
        return f"{self.api_base_url}/jobs"

    @property
    def documents_url(self) -> str:
        return f"{self.api_base_url}/documents"
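A quick illustration of how these properties compose from BACKEND_URL; the Docker-style hostname below is an assumed example, not something defined in this diff:

import os

os.environ["BACKEND_URL"] = "http://backend:8000"  # assumed Compose service name

config = Config()
assert config.auth_url == "http://backend:8000/api/auth"
assert config.documents_url == "http://backend:8000/api/documents"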
0  src/frontend/layouts/__init__.py  Normal file
121  src/frontend/layouts/layout.py  Normal file
@@ -0,0 +1,121 @@
from dash import html, dcc
import dash_mantine_components as dmc
from dash_iconify import DashIconify


def create_layout():
    return dmc.MantineProvider(
        theme={
            "fontFamily": "'Inter', sans-serif",
            "primaryColor": "blue",
            "components": {
                "Button": {"styles": {"root": {"fontWeight": 400}}},
                "Alert": {"styles": {"title": {"fontWeight": 500}}},
                "AvatarGroup": {"styles": {"truncated": {"fontWeight": 500}}},
            },
        },
        children=[
            dcc.Store(id="auth-store", storage_type="session"),
            dcc.Store(id="user-store", storage_type="session"),
            dcc.Location(id="url", refresh=False),

            html.Div(
                id="main-content",
                children=[
                    create_header(),
                    html.Div(id="page-content")
                ]
            )
        ]
    )


def create_header():
    return dmc.Header(
        height=70,
        fixed=True,
        children=[
            dmc.Container(
                size="xl",
                children=[
                    dmc.Group(
                        position="apart",
                        align="center",
                        style={"height": 70},
                        children=[
                            dmc.Group(
                                align="center",
                                spacing="xs",
                                children=[
                                    DashIconify(
                                        icon="tabler:briefcase",
                                        width=32,
                                        color="#228BE6"
                                    ),
                                    dmc.Text(
                                        "Job Forge",
                                        size="xl",
                                        weight=700,
                                        color="blue"
                                    )
                                ]
                            ),

                            dmc.Group(
                                id="header-actions",
                                spacing="md",
                                children=[
                                    dmc.Button(
                                        "Login",
                                        id="login-btn",
                                        variant="outline",
                                        leftIcon=DashIconify(icon="tabler:login")
                                    )
                                ]
                            )
                        ]
                    )
                ]
            )
        ]
    )


def create_navigation():
    return dmc.Navbar(
        width={"base": 300},
        children=[
            dmc.ScrollArea(
                style={"height": "calc(100vh - 70px)"},
                children=[
                    dmc.NavLink(
                        label="Dashboard",
                        icon=DashIconify(icon="tabler:dashboard"),
                        href="/",
                        id="nav-dashboard"
                    ),
                    dmc.NavLink(
                        label="Job Search",
                        icon=DashIconify(icon="tabler:search"),
                        href="/jobs",
                        id="nav-jobs"
                    ),
                    dmc.NavLink(
                        label="Applications",
                        icon=DashIconify(icon="tabler:briefcase"),
                        href="/applications",
                        id="nav-applications"
                    ),
                    dmc.NavLink(
                        label="Documents",
                        icon=DashIconify(icon="tabler:file-text"),
                        href="/documents",
                        id="nav-documents"
                    ),
                    dmc.NavLink(
                        label="Profile",
                        icon=DashIconify(icon="tabler:user"),
                        href="/profile",
                        id="nav-profile"
                    )
                ]
            )
        ]
    )
36  src/frontend/main.py  Normal file
@@ -0,0 +1,36 @@
import dash
from dash import html, dcc
import dash_mantine_components as dmc
from dash_iconify import DashIconify
import os

from config import Config
from layouts.layout import create_layout
from callbacks import register_callbacks

# Initialize config
config = Config()

# Initialize Dash app
app = dash.Dash(
    __name__,
    external_stylesheets=[
        "https://fonts.googleapis.com/css2?family=Inter:wght@100;200;300;400;500;600;700;800;900&display=swap"
    ],
    suppress_callback_exceptions=True,
    title="Job Forge - AI-Powered Job Application Assistant"
)

# Set up the layout
app.layout = create_layout()

# Register callbacks
register_callbacks(app, config)

if __name__ == "__main__":
    app.run_server(
        host="0.0.0.0",
        port=8501,
        debug=config.DEBUG,
        dev_tools_hot_reload=config.DEBUG
    )
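Note that config.DEBUG, which drives debug and dev_tools_hot_reload here, is parsed from a string. A small sketch of that behaviour, assuming Config is importable as above:

import os

from config import Config  # same module imported by main.py

os.environ["DEBUG"] = "True"
assert Config().DEBUG is True

os.environ["DEBUG"] = "1"   # not recognised: only the literal "true" enables debug
assert Config().DEBUG is False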
0  src/frontend/pages/__init__.py  Normal file
164  src/frontend/pages/auth.py  Normal file
@@ -0,0 +1,164 @@
from dash import html, dcc
import dash_mantine_components as dmc
from dash_iconify import DashIconify


def create_login_page():
    return dmc.Container(
        size="xs",
        style={"marginTop": "10vh"},
        children=[
            dmc.Paper(
                shadow="lg",
                radius="md",
                p="xl",
                children=[
                    dmc.Group(
                        position="center",
                        mb="xl",
                        children=[
                            DashIconify(
                                icon="tabler:briefcase",
                                width=40,
                                color="#228BE6"
                            ),
                            dmc.Title("Job Forge", order=2, color="blue")
                        ]
                    ),

                    dmc.Tabs(
                        id="auth-tabs",
                        value="login",
                        children=[
                            dmc.TabsList(
                                grow=True,
                                children=[
                                    dmc.Tab("Login", value="login"),
                                    dmc.Tab("Register", value="register")
                                ]
                            ),

                            dmc.TabsPanel(
                                value="login",
                                children=[
                                    html.Form(
                                        id="login-form",
                                        children=[
                                            dmc.TextInput(
                                                id="login-email",
                                                label="Email",
                                                placeholder="your.email@example.com",
                                                icon=DashIconify(icon="tabler:mail"),
                                                required=True,
                                                mb="md"
                                            ),
                                            dmc.PasswordInput(
                                                id="login-password",
                                                label="Password",
                                                placeholder="Your password",
                                                icon=DashIconify(icon="tabler:lock"),
                                                required=True,
                                                mb="xl"
                                            ),
                                            dmc.Button(
                                                "Login",
                                                id="login-submit",
                                                fullWidth=True,
                                                leftIcon=DashIconify(icon="tabler:login")
                                            )
                                        ]
                                    )
                                ]
                            ),

                            dmc.TabsPanel(
                                value="register",
                                children=[
                                    html.Form(
                                        id="register-form",
                                        children=[
                                            dmc.Group(
                                                grow=True,
                                                children=[
                                                    dmc.TextInput(
                                                        id="register-first-name",
                                                        label="First Name",
                                                        placeholder="John",
                                                        required=True,
                                                        style={"flex": 1}
                                                    ),
                                                    dmc.TextInput(
                                                        id="register-last-name",
                                                        label="Last Name",
                                                        placeholder="Doe",
                                                        required=True,
                                                        style={"flex": 1}
                                                    )
                                                ]
                                            ),
                                            dmc.TextInput(
                                                id="register-email",
                                                label="Email",
                                                placeholder="your.email@example.com",
                                                icon=DashIconify(icon="tabler:mail"),
                                                required=True,
                                                mt="md"
                                            ),
                                            dmc.TextInput(
                                                id="register-phone",
                                                label="Phone (Optional)",
                                                placeholder="+1 (555) 123-4567",
                                                icon=DashIconify(icon="tabler:phone"),
                                                mt="md"
                                            ),
                                            dmc.PasswordInput(
                                                id="register-password",
                                                label="Password",
                                                placeholder="Your password",
                                                icon=DashIconify(icon="tabler:lock"),
                                                required=True,
                                                mt="md"
                                            ),
                                            dmc.PasswordInput(
                                                id="register-password-confirm",
                                                label="Confirm Password",
                                                placeholder="Confirm your password",
                                                icon=DashIconify(icon="tabler:lock"),
                                                required=True,
                                                mt="md",
                                                mb="xl"
                                            ),
                                            dmc.Button(
                                                "Register",
                                                id="register-submit",
                                                fullWidth=True,
                                                leftIcon=DashIconify(icon="tabler:user-plus")
                                            )
                                        ]
                                    )
                                ]
                            )
                        ]
                    ),

                    html.Div(id="auth-alerts", style={"marginTop": "1rem"})
                ]
            )
        ]
    )


def create_logout_confirmation():
    return dmc.Modal(
        title="Confirm Logout",
        id="logout-modal",
        children=[
            dmc.Text("Are you sure you want to logout?"),
            dmc.Group(
                position="right",
                mt="md",
                children=[
                    dmc.Button("Cancel", id="logout-cancel", variant="outline"),
                    dmc.Button("Logout", id="logout-confirm", color="red")
                ]
            )
        ]
    )
185  src/frontend/pages/home.py  Normal file
@@ -0,0 +1,185 @@
from dash import html
import dash_mantine_components as dmc
from dash_iconify import DashIconify


def create_home_page():
    return dmc.Container(
        size="xl",
        pt="md",
        children=[
            dmc.Title("Welcome to Job Forge", order=1, mb="lg"),

            dmc.Grid(
                children=[
                    dmc.Col(
                        dmc.Card(
                            children=[
                                dmc.Group(
                                    children=[
                                        DashIconify(
                                            icon="tabler:search",
                                            width=40,
                                            color="#228BE6"
                                        ),
                                        dmc.Stack(
                                            spacing=5,
                                            children=[
                                                dmc.Text("Find Jobs", weight=600, size="lg"),
                                                dmc.Text(
                                                    "Search and discover job opportunities",
                                                    size="sm",
                                                    color="dimmed"
                                                )
                                            ]
                                        )
                                    ]
                                ),
                                dmc.Button(
                                    "Search Jobs",
                                    fullWidth=True,
                                    mt="md",
                                    id="home-search-jobs-btn"
                                )
                            ],
                            withBorder=True,
                            shadow="sm",
                            radius="md",
                            p="lg"
                        ),
                        span=6
                    ),

                    dmc.Col(
                        dmc.Card(
                            children=[
                                dmc.Group(
                                    children=[
                                        DashIconify(
                                            icon="tabler:briefcase",
                                            width=40,
                                            color="#40C057"
                                        ),
                                        dmc.Stack(
                                            spacing=5,
                                            children=[
                                                dmc.Text("Track Applications", weight=600, size="lg"),
                                                dmc.Text(
                                                    "Manage your job applications",
                                                    size="sm",
                                                    color="dimmed"
                                                )
                                            ]
                                        )
                                    ]
                                ),
                                dmc.Button(
                                    "View Applications",
                                    fullWidth=True,
                                    mt="md",
                                    color="green",
                                    id="home-applications-btn"
                                )
                            ],
                            withBorder=True,
                            shadow="sm",
                            radius="md",
                            p="lg"
                        ),
                        span=6
                    ),

                    dmc.Col(
                        dmc.Card(
                            children=[
                                dmc.Group(
                                    children=[
                                        DashIconify(
                                            icon="tabler:file-text",
                                            width=40,
                                            color="#FD7E14"
                                        ),
                                        dmc.Stack(
                                            spacing=5,
                                            children=[
                                                dmc.Text("AI Documents", weight=600, size="lg"),
                                                dmc.Text(
                                                    "Generate resumes and cover letters",
                                                    size="sm",
                                                    color="dimmed"
                                                )
                                            ]
                                        )
                                    ]
                                ),
                                dmc.Button(
                                    "Create Documents",
                                    fullWidth=True,
                                    mt="md",
                                    color="orange",
                                    id="home-documents-btn"
                                )
                            ],
                            withBorder=True,
                            shadow="sm",
                            radius="md",
                            p="lg"
                        ),
                        span=6
                    ),

                    dmc.Col(
                        dmc.Card(
                            children=[
                                dmc.Group(
                                    children=[
                                        DashIconify(
                                            icon="tabler:user",
                                            width=40,
                                            color="#BE4BDB"
                                        ),
                                        dmc.Stack(
                                            spacing=5,
                                            children=[
                                                dmc.Text("Profile", weight=600, size="lg"),
                                                dmc.Text(
                                                    "Manage your profile and settings",
                                                    size="sm",
                                                    color="dimmed"
                                                )
                                            ]
                                        )
                                    ]
                                ),
                                dmc.Button(
                                    "Edit Profile",
                                    fullWidth=True,
                                    mt="md",
                                    color="violet",
                                    id="home-profile-btn"
                                )
                            ],
                            withBorder=True,
                            shadow="sm",
                            radius="md",
                            p="lg"
                        ),
                        span=6
                    )
                ],
                gutter="md"
            ),

            dmc.Divider(my="xl"),

            dmc.Title("Recent Activity", order=2, mb="md"),
            dmc.Card(
                children=[
                    dmc.Text("No recent activity yet. Start by searching for jobs or uploading your resume!")
                ],
                withBorder=True,
                shadow="sm",
                radius="md",
                p="lg"
            )
        ]
    )
1  tests/__init__.py  Normal file
@@ -0,0 +1 @@
# Tests package for Job Forge
@@ -1,34 +1,32 @@
-# Test configuration for Job Forge
+"""
+Updated test configuration for Job Forge that matches the actual project structure
+"""
 import pytest
 import asyncio
-import asyncpg
 from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
 from sqlalchemy.orm import sessionmaker
 from fastapi.testclient import TestClient
 from httpx import AsyncClient
 import os
 from typing import AsyncGenerator
-from unittest.mock import AsyncMock
+from unittest.mock import AsyncMock, Mock
+import uuid

-from app.main import app
-from app.core.database import get_db, Base
-from app.core.security import create_access_token
-from app.models.user import User
-from app.models.application import Application
+# Fix import paths to match actual structure
+from src.backend.main import app
+from src.backend.core.database import get_db, Base
+from src.backend.models.user import User
+from src.backend.api.auth import create_access_token, hash_password

-# Test database URL
+# Test database URL (use a separate test database)
 TEST_DATABASE_URL = os.getenv(
     "TEST_DATABASE_URL",
-    "postgresql+asyncpg://jobforge:jobforge123@localhost:5432/jobforge_test"
+    "postgresql+asyncpg://jobforge_user:jobforge_password@localhost:5432/jobforge_test"
 )

-# Test engine and session factory
+# Test engine
 test_engine = create_async_engine(TEST_DATABASE_URL, echo=False)
-TestSessionLocal = sessionmaker(
-    test_engine, class_=AsyncSession, expire_on_commit=False
-)
+TestSessionLocal = sessionmaker(test_engine, class_=AsyncSession, expire_on_commit=False)


 @pytest.fixture(scope="session")
 def event_loop():
@@ -37,28 +35,13 @@ def event_loop():
     yield loop
     loop.close()


 @pytest.fixture(scope="session")
 async def setup_test_db():
     """Set up test database tables."""

     # Create all tables
     async with test_engine.begin() as conn:
         await conn.run_sync(Base.metadata.drop_all)
         await conn.run_sync(Base.metadata.create_all)

-        # Enable RLS and create policies
-        await conn.execute("""
-            ALTER TABLE applications ENABLE ROW LEVEL SECURITY;
-
-            DROP POLICY IF EXISTS applications_user_isolation ON applications;
-            CREATE POLICY applications_user_isolation ON applications
-                FOR ALL TO authenticated
-                USING (user_id = current_setting('app.current_user_id')::UUID);
-
-            -- Create vector extension if needed
-            CREATE EXTENSION IF NOT EXISTS vector;
-        """)
-
     yield

@@ -66,22 +49,18 @@ async def setup_test_db():
     async with test_engine.begin() as conn:
         await conn.run_sync(Base.metadata.drop_all)


 @pytest.fixture
 async def test_db(setup_test_db) -> AsyncGenerator[AsyncSession, None]:
     """Create a test database session."""

     async with TestSessionLocal() as session:
         try:
             yield session
         finally:
             await session.rollback()


 @pytest.fixture
 def override_get_db(test_db: AsyncSession):
     """Override the get_db dependency for testing."""

     def _override_get_db():
         return test_db

@@ -89,147 +68,72 @@ def override_get_db(test_db: AsyncSession):
     yield
     app.dependency_overrides.clear()


 @pytest.fixture
 def test_client(override_get_db):
     """Create a test client."""

     with TestClient(app) as client:
         yield client


 @pytest.fixture
 async def async_client(override_get_db):
     """Create an async test client."""

     async with AsyncClient(app=app, base_url="http://test") as client:
         yield client


 @pytest.fixture
 async def test_user(test_db: AsyncSession):
     """Create a test user."""
-    from app.crud.user import create_user
-    from app.schemas.user import UserCreate
-
-    user_data = UserCreate(
+    user = User(
         email="test@jobforge.com",
-        password="testpassword123",
-        first_name="Test",
-        last_name="User"
+        password_hash=hash_password("testpassword123"),
+        full_name="Test User"
     )

-    user = await create_user(test_db, user_data)
+    test_db.add(user)
     await test_db.commit()
+    await test_db.refresh(user)
     return user


 @pytest.fixture
 def test_user_token(test_user):
     """Create a JWT token for test user."""
-    token_data = {"sub": str(test_user.id), "email": test_user.email}
+    token_data = {"sub": str(test_user.id)}
     return create_access_token(data=token_data)


 @pytest.fixture
-async def test_application(test_db: AsyncSession, test_user):
-    """Create a test job application."""
-    from app.crud.application import create_application
-    from app.schemas.application import ApplicationCreate
-
-    app_data = ApplicationCreate(
-        company_name="Test Corp",
-        role_title="Software Developer",
-        job_description="Python developer position with FastAPI experience",
-        status="draft"
-    )
-
-    application = await create_application(test_db, app_data, test_user.id)
-    await test_db.commit()
-    return application
-
-
-@pytest.fixture
-def mock_claude_service():
-    """Mock Claude AI service."""
-    mock = AsyncMock()
-    mock.generate_cover_letter.return_value = """
-    Dear Hiring Manager,
-
-    I am writing to express my strong interest in the Software Developer position at Test Corp.
-    With my experience in Python development and FastAPI expertise, I am confident I would be
-    a valuable addition to your team.
-
-    Thank you for your consideration.
-
-    Sincerely,
-    Test User
-    """
-
-    return mock
-
-
-@pytest.fixture
-def mock_openai_service():
-    """Mock OpenAI service."""
-    mock = AsyncMock()
-    mock.create_embedding.return_value = [0.1] * 1536  # Mock embedding vector
-    mock.test_connection.return_value = True
-
+def mock_ai_service():
+    """Mock AI service for testing."""
+    mock = Mock()
+    mock.generate_cover_letter = AsyncMock(return_value={
+        "content": "Dear Hiring Manager,\n\nI am writing to express my interest...\n\nBest regards,\nTest User",
+        "model_used": "mock-ai",
+        "prompt": "Mock prompt for testing"
+    })
+    mock.generate_resume_optimization = AsyncMock(return_value={
+        "content": "Optimized Resume\n\nTest User\nSoftware Engineer\n\nExperience optimized for target role...",
+        "model_used": "mock-ai",
+        "prompt": "Mock resume optimization prompt"
+    })
     return mock


 @pytest.fixture
 async def multiple_test_users(test_db: AsyncSession):
-    """Create multiple test users for isolation testing."""
-    from app.crud.user import create_user
-    from app.schemas.user import UserCreate
-
+    """Create multiple test users for testing."""
     users = []
     for i in range(3):
-        user_data = UserCreate(
+        user = User(
             email=f"user{i}@test.com",
-            password="password123",
-            first_name=f"User{i}",
-            last_name="Test"
+            password_hash=hash_password("password123"),
+            full_name=f"User {i} Test"
         )
-        user = await create_user(test_db, user_data)
+        test_db.add(user)
        users.append(user)

     await test_db.commit()
+    for user in users:
+        await test_db.refresh(user)
     return users


-@pytest.fixture
-async def applications_for_users(test_db: AsyncSession, multiple_test_users):
-    """Create applications for multiple users to test isolation."""
-    from app.crud.application import create_application
-    from app.schemas.application import ApplicationCreate
-
-    all_applications = []
-
-    for i, user in enumerate(multiple_test_users):
-        for j in range(2):  # 2 applications per user
-            app_data = ApplicationCreate(
-                company_name=f"Company{i}-{j}",
-                role_title=f"Role{i}-{j}",
-                job_description=f"Job description for user {i}, application {j}",
-                status="draft"
-            )
-            application = await create_application(test_db, app_data, user.id)
-            all_applications.append(application)
-
-    await test_db.commit()
-    return all_applications
-
-
 # Test data factories
 class TestDataFactory:
     """Factory for creating test data."""
@@ -239,87 +143,46 @@ class TestDataFactory:
         """Create user test data."""
         return {
             "email": email or "test@example.com",
             "password": "testpassword123",
             "first_name": "Test",
             "last_name": "User",
             **kwargs
         }

     @staticmethod
-    def application_data(company_name: str = None, **kwargs):
-        """Create application test data."""
+    def cover_letter_request(**kwargs):
+        """Create cover letter request data."""
         return {
-            "company_name": company_name or "Test Company",
-            "role_title": "Software Developer",
-            "job_description": "Python developer position",
-            "status": "draft",
+            "job_description": "We are looking for a Software Engineer with Python experience",
+            "company_name": "Test Company",
+            "role_title": "Software Engineer",
             **kwargs
         }

     @staticmethod
-    def ai_response():
-        """Create mock AI response."""
-        return """
-        Dear Hiring Manager,
-
-        I am excited to apply for this position. My background in software development
-        and passion for technology make me an ideal candidate.
-
-        Best regards,
-        Test User
-        """
+    def resume_optimization_request(**kwargs):
+        """Create resume optimization request data."""
+        return {
+            "current_resume": "John Doe\nSoftware Engineer\n\nExperience:\n- Python development\n- Web applications",
+            "job_description": "Senior Python Developer role requiring FastAPI experience",
+            "role_title": "Senior Python Developer",
+            **kwargs
+        }


-# Database utilities for testing
+# Helper functions
 async def create_test_user_and_token(db: AsyncSession, email: str):
     """Helper to create a user and return auth token."""
-    from app.crud.user import create_user
-    from app.schemas.user import UserCreate
-
-    user_data = UserCreate(
+    user = User(
         email=email,
-        password="password123",
-        first_name="Test",
-        last_name="User"
+        password_hash=hash_password("password123"),
+        full_name="Test User"
     )

-    user = await create_user(db, user_data)
+    db.add(user)
     await db.commit()
+    await db.refresh(user)

-    token_data = {"sub": str(user.id), "email": user.email}
+    token_data = {"sub": str(user.id)}
     token = create_access_token(data=token_data)

     return user, token


-async def set_rls_context(db: AsyncSession, user_id: str):
-    """Set RLS context for testing multi-tenancy."""
-    await db.execute(f"SET app.current_user_id = '{user_id}'")
-
-
-# Performance testing helpers
-@pytest.fixture
-def benchmark_db_operations():
-    """Benchmark database operations."""
-    import time
-
-    class BenchmarkContext:
-        def __init__(self):
-            self.start_time = None
-            self.end_time = None
-
-        def __enter__(self):
-            self.start_time = time.time()
-            return self
-
-        def __exit__(self, *args):
-            self.end_time = time.time()
-
-        @property
-        def duration(self):
-            return self.end_time - self.start_time if self.end_time else None
-
-    return BenchmarkContext
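A minimal sketch of how the updated fixtures above are meant to be combined in a test; the endpoint and response fields mirror tests/integration/test_ai_api_integration.py further down in this diff:

import pytest


def test_generate_cover_letter_smoke(test_client, test_user_token):
    # test_client comes with the get_db override applied; test_user_token is a JWT
    # for the fixture user created against the test database.
    headers = {"Authorization": f"Bearer {test_user_token}"}
    payload = {
        "job_description": "Python developer position",
        "company_name": "Example Corp",
        "role_title": "Python Developer",
    }

    response = test_client.post(
        "/api/ai/generate-cover-letter", json=payload, headers=headers
    )

    assert response.status_code == 200
    assert "content" in response.json()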
325  tests/conftest_old.py  Normal file
@@ -0,0 +1,325 @@
# Test configuration for Job Forge
import pytest
import asyncio
import asyncpg
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from fastapi.testclient import TestClient
from httpx import AsyncClient
import os
from typing import AsyncGenerator
from unittest.mock import AsyncMock

from app.main import app
from app.core.database import get_db, Base
from app.core.security import create_access_token
from app.models.user import User
from app.models.application import Application


# Test database URL
TEST_DATABASE_URL = os.getenv(
    "TEST_DATABASE_URL",
    "postgresql+asyncpg://jobforge:jobforge123@localhost:5432/jobforge_test"
)

# Test engine and session factory
test_engine = create_async_engine(TEST_DATABASE_URL, echo=False)
TestSessionLocal = sessionmaker(
    test_engine, class_=AsyncSession, expire_on_commit=False
)


@pytest.fixture(scope="session")
def event_loop():
    """Create an instance of the default event loop for the test session."""
    loop = asyncio.get_event_loop_policy().new_event_loop()
    yield loop
    loop.close()


@pytest.fixture(scope="session")
async def setup_test_db():
    """Set up test database tables."""

    # Create all tables
    async with test_engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)
        await conn.run_sync(Base.metadata.create_all)

        # Enable RLS and create policies
        await conn.execute("""
            ALTER TABLE applications ENABLE ROW LEVEL SECURITY;

            DROP POLICY IF EXISTS applications_user_isolation ON applications;
            CREATE POLICY applications_user_isolation ON applications
                FOR ALL TO authenticated
                USING (user_id = current_setting('app.current_user_id')::UUID);

            -- Create vector extension if needed
            CREATE EXTENSION IF NOT EXISTS vector;
        """)

    yield

    # Cleanup
    async with test_engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)


@pytest.fixture
async def test_db(setup_test_db) -> AsyncGenerator[AsyncSession, None]:
    """Create a test database session."""

    async with TestSessionLocal() as session:
        try:
            yield session
        finally:
            await session.rollback()


@pytest.fixture
def override_get_db(test_db: AsyncSession):
    """Override the get_db dependency for testing."""

    def _override_get_db():
        return test_db

    app.dependency_overrides[get_db] = _override_get_db
    yield
    app.dependency_overrides.clear()


@pytest.fixture
def test_client(override_get_db):
    """Create a test client."""

    with TestClient(app) as client:
        yield client


@pytest.fixture
async def async_client(override_get_db):
    """Create an async test client."""

    async with AsyncClient(app=app, base_url="http://test") as client:
        yield client


@pytest.fixture
async def test_user(test_db: AsyncSession):
    """Create a test user."""

    from app.crud.user import create_user
    from app.schemas.user import UserCreate

    user_data = UserCreate(
        email="test@jobforge.com",
        password="testpassword123",
        first_name="Test",
        last_name="User"
    )

    user = await create_user(test_db, user_data)
    await test_db.commit()
    return user


@pytest.fixture
def test_user_token(test_user):
    """Create a JWT token for test user."""

    token_data = {"sub": str(test_user.id), "email": test_user.email}
    return create_access_token(data=token_data)


@pytest.fixture
async def test_application(test_db: AsyncSession, test_user):
    """Create a test job application."""

    from app.crud.application import create_application
    from app.schemas.application import ApplicationCreate

    app_data = ApplicationCreate(
        company_name="Test Corp",
        role_title="Software Developer",
        job_description="Python developer position with FastAPI experience",
        status="draft"
    )

    application = await create_application(test_db, app_data, test_user.id)
    await test_db.commit()
    return application


@pytest.fixture
def mock_claude_service():
    """Mock Claude AI service."""

    mock = AsyncMock()
    mock.generate_cover_letter.return_value = """
    Dear Hiring Manager,

    I am writing to express my strong interest in the Software Developer position at Test Corp.
    With my experience in Python development and FastAPI expertise, I am confident I would be
    a valuable addition to your team.

    Thank you for your consideration.

    Sincerely,
    Test User
    """

    return mock


@pytest.fixture
def mock_openai_service():
    """Mock OpenAI service."""

    mock = AsyncMock()
    mock.create_embedding.return_value = [0.1] * 1536  # Mock embedding vector
    mock.test_connection.return_value = True

    return mock


@pytest.fixture
async def multiple_test_users(test_db: AsyncSession):
    """Create multiple test users for isolation testing."""

    from app.crud.user import create_user
    from app.schemas.user import UserCreate

    users = []
    for i in range(3):
        user_data = UserCreate(
            email=f"user{i}@test.com",
            password="password123",
            first_name=f"User{i}",
            last_name="Test"
        )
        user = await create_user(test_db, user_data)
        users.append(user)

    await test_db.commit()
    return users


@pytest.fixture
async def applications_for_users(test_db: AsyncSession, multiple_test_users):
    """Create applications for multiple users to test isolation."""

    from app.crud.application import create_application
    from app.schemas.application import ApplicationCreate

    all_applications = []

    for i, user in enumerate(multiple_test_users):
        for j in range(2):  # 2 applications per user
            app_data = ApplicationCreate(
                company_name=f"Company{i}-{j}",
                role_title=f"Role{i}-{j}",
                job_description=f"Job description for user {i}, application {j}",
                status="draft"
            )
            application = await create_application(test_db, app_data, user.id)
            all_applications.append(application)

    await test_db.commit()
    return all_applications


# Test data factories
class TestDataFactory:
    """Factory for creating test data."""

    @staticmethod
    def user_data(email: str = None, **kwargs):
        """Create user test data."""
        return {
            "email": email or "test@example.com",
            "password": "testpassword123",
            "first_name": "Test",
            "last_name": "User",
            **kwargs
        }

    @staticmethod
    def application_data(company_name: str = None, **kwargs):
        """Create application test data."""
        return {
            "company_name": company_name or "Test Company",
            "role_title": "Software Developer",
            "job_description": "Python developer position",
            "status": "draft",
            **kwargs
        }

    @staticmethod
    def ai_response():
        """Create mock AI response."""
        return """
        Dear Hiring Manager,

        I am excited to apply for this position. My background in software development
        and passion for technology make me an ideal candidate.

        Best regards,
        Test User
        """


# Database utilities for testing
async def create_test_user_and_token(db: AsyncSession, email: str):
    """Helper to create a user and return auth token."""

    from app.crud.user import create_user
    from app.schemas.user import UserCreate

    user_data = UserCreate(
        email=email,
        password="password123",
        first_name="Test",
        last_name="User"
    )

    user = await create_user(db, user_data)
    await db.commit()

    token_data = {"sub": str(user.id), "email": user.email}
    token = create_access_token(data=token_data)

    return user, token


async def set_rls_context(db: AsyncSession, user_id: str):
    """Set RLS context for testing multi-tenancy."""

    await db.execute(f"SET app.current_user_id = '{user_id}'")


# Performance testing helpers
@pytest.fixture
def benchmark_db_operations():
    """Benchmark database operations."""

    import time

    class BenchmarkContext:
        def __init__(self):
            self.start_time = None
            self.end_time = None

        def __enter__(self):
            self.start_time = time.time()
            return self

        def __exit__(self, *args):
            self.end_time = time.time()

        @property
        def duration(self):
            return self.end_time - self.start_time if self.end_time else None

    return BenchmarkContext
1
tests/integration/__init__.py
Normal file
1
tests/integration/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
# Integration tests package
|
||||||
455
tests/integration/test_ai_api_integration.py
Normal file
455
tests/integration/test_ai_api_integration.py
Normal file
@@ -0,0 +1,455 @@
|
|||||||
|
"""
|
||||||
|
Integration tests for AI API endpoints
|
||||||
|
"""
|
||||||
|
import pytest
|
||||||
|
from fastapi.testclient import TestClient
|
||||||
|
from unittest.mock import patch, AsyncMock
|
||||||
|
|
||||||
|
from src.backend.main import app
|
||||||
|
|
||||||
|
|
||||||
|
class TestAIDocumentEndpoints:
|
||||||
|
"""Test AI document generation API endpoints."""
|
||||||
|
|
||||||
|
def test_generate_cover_letter_success(self, test_client, test_user_token):
|
||||||
|
"""Test successful cover letter generation."""
|
||||||
|
headers = {"Authorization": f"Bearer {test_user_token}"}
|
||||||
|
request_data = {
|
||||||
|
"job_description": "We are looking for a Senior Python Developer with FastAPI experience and PostgreSQL knowledge. The ideal candidate will have 5+ years of experience.",
|
||||||
|
"company_name": "TechCorp Industries",
|
||||||
|
"role_title": "Senior Python Developer"
|
||||||
|
}
|
||||||
|
|
||||||
|
response = test_client.post(
|
||||||
|
"/api/ai/generate-cover-letter",
|
||||||
|
json=request_data,
|
||||||
|
headers=headers
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
|
||||||
|
assert "content" in data
|
||||||
|
assert "model_used" in data
|
||||||
|
assert "generation_prompt" in data
|
||||||
|
|
||||||
|
# Verify content includes relevant information
|
||||||
|
assert "TechCorp Industries" in data["content"]
|
||||||
|
assert "Senior Python Developer" in data["content"]
|
||||||
|
assert len(data["content"]) > 100 # Should be substantial
|
||||||
|
|
||||||
|
# Should use template fallback without API keys
|
||||||
|
assert data["model_used"] == "template"
|
||||||
|
|
||||||
|
def test_generate_cover_letter_with_resume(self, test_client, test_user_token):
|
||||||
|
"""Test cover letter generation with user resume included."""
|
||||||
|
headers = {"Authorization": f"Bearer {test_user_token}"}
|
||||||
|
request_data = {
|
||||||
|
"job_description": "Python developer role requiring Django experience",
|
||||||
|
"company_name": "Resume Corp",
|
||||||
|
"role_title": "Python Developer",
|
||||||
|
"user_resume": "John Doe\nSoftware Engineer\n\nExperience:\n- 5 years Python development\n- Django and Flask frameworks\n- PostgreSQL databases"
|
||||||
|
}
|
||||||
|
|
||||||
|
response = test_client.post(
|
||||||
|
"/api/ai/generate-cover-letter",
|
||||||
|
json=request_data,
|
||||||
|
headers=headers
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
|
||||||
|
assert "Resume Corp" in data["content"]
|
||||||
|
# Prompt should reference the resume
|
||||||
|
assert "Resume/Background" in data["generation_prompt"]
|
||||||
|
|
||||||
|
def test_generate_cover_letter_unauthorized(self, test_client):
|
||||||
|
"""Test cover letter generation without authentication."""
|
||||||
|
request_data = {
|
||||||
|
"job_description": "Test job",
|
||||||
|
"company_name": "Test Corp",
|
||||||
|
"role_title": "Test Role"
|
||||||
|
}
|
||||||
|
|
||||||
|
response = test_client.post("/api/ai/generate-cover-letter", json=request_data)
|
||||||
|
|
||||||
|
assert response.status_code == 403 # HTTPBearer returns 403
|
||||||
|
|
||||||
|
def test_generate_cover_letter_invalid_token(self, test_client):
|
||||||
|
"""Test cover letter generation with invalid token."""
|
||||||
|
headers = {"Authorization": "Bearer invalid.token.here"}
|
||||||
|
request_data = {
|
||||||
|
"job_description": "Test job",
|
||||||
|
"company_name": "Test Corp",
|
||||||
|
"role_title": "Test Role"
|
||||||
|
}
|
||||||
|
|
||||||
|
response = test_client.post(
|
||||||
|
"/api/ai/generate-cover-letter",
|
||||||
|
json=request_data,
|
||||||
|
headers=headers
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 401
|
||||||
|
|
||||||
|
def test_generate_cover_letter_missing_fields(self, test_client, test_user_token):
|
||||||
|
"""Test cover letter generation with missing required fields."""
|
||||||
|
headers = {"Authorization": f"Bearer {test_user_token}"}
|
||||||
|
request_data = {
|
||||||
|
"job_description": "Test job",
|
||||||
|
# Missing company_name and role_title
|
||||||
|
}
|
||||||
|
|
||||||
|
response = test_client.post(
|
||||||
|
"/api/ai/generate-cover-letter",
|
||||||
|
json=request_data,
|
||||||
|
headers=headers
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 422 # Validation error
|
||||||
|
|
||||||
|
def test_optimize_resume_success(self, test_client, test_user_token):
|
||||||
|
"""Test successful resume optimization."""
|
||||||
|
headers = {"Authorization": f"Bearer {test_user_token}"}
|
||||||
|
request_data = {
|
||||||
|
"current_resume": """
|
||||||
|
John Smith
|
||||||
|
Software Engineer
|
||||||
|
|
||||||
|
Experience:
|
||||||
|
- 3 years Python development
|
||||||
|
- Built REST APIs using Flask
|
||||||
|
- Database management with MySQL
|
||||||
|
- Team collaboration and code reviews
|
||||||
|
""",
|
||||||
|
"job_description": "Senior Python Developer role requiring FastAPI, PostgreSQL, and AI/ML experience. Must have 5+ years of experience.",
|
||||||
|
"role_title": "Senior Python Developer"
|
||||||
|
}
|
||||||
|
|
||||||
|
response = test_client.post(
|
||||||
|
"/api/ai/optimize-resume",
|
||||||
|
json=request_data,
|
||||||
|
headers=headers
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
|
||||||
|
assert "content" in data
|
||||||
|
assert "model_used" in data
|
||||||
|
assert "generation_prompt" in data
|
||||||
|
|
||||||
|
# Should include original resume content
|
||||||
|
assert "John Smith" in data["content"]
|
||||||
|
assert "Senior Python Developer" in data["content"]
|
||||||
|
assert data["model_used"] == "template"
|
||||||
|
|
||||||
|
def test_optimize_resume_unauthorized(self, test_client):
|
||||||
|
"""Test resume optimization without authentication."""
|
||||||
|
request_data = {
|
||||||
|
"current_resume": "Test resume",
|
||||||
|
"job_description": "Test job",
|
||||||
|
"role_title": "Test role"
|
||||||
|
}
|
||||||
|
|
||||||
|
response = test_client.post("/api/ai/optimize-resume", json=request_data)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
def test_test_ai_connection_success(self, test_client, test_user_token):
|
||||||
|
"""Test AI connection test endpoint."""
|
||||||
|
headers = {"Authorization": f"Bearer {test_user_token}"}
|
||||||
|
|
||||||
|
response = test_client.post("/api/ai/test-ai-connection", headers=headers)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
|
||||||
|
assert "claude_available" in data
|
||||||
|
assert "openai_available" in data
|
||||||
|
assert "user" in data
|
||||||
|
assert "test_generation" in data
|
||||||
|
|
||||||
|
# Without API keys, should show unavailable but test should succeed
|
||||||
|
assert data["claude_available"] == False
|
||||||
|
assert data["openai_available"] == False
|
||||||
|
assert data["test_generation"] == "success"
|
||||||
|
assert data["model_used"] == "template"
|
||||||
|
assert "content_preview" in data
|
||||||
|
|
||||||
|
def test_test_ai_connection_unauthorized(self, test_client):
|
||||||
|
"""Test AI connection test without authentication."""
|
||||||
|
response = test_client.post("/api/ai/test-ai-connection")
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
class TestAIAPIErrorHandling:
|
||||||
|
"""Test error handling in AI API endpoints."""
|
||||||
|
|
||||||
|
@patch('src.backend.services.ai_service.ai_service.generate_cover_letter')
|
||||||
|
def test_cover_letter_generation_service_error(self, mock_generate, test_client, test_user_token):
|
||||||
|
"""Test cover letter generation when AI service fails."""
|
||||||
|
# Mock the service to raise an exception
|
||||||
|
mock_generate.side_effect = Exception("AI service unavailable")
|
||||||
|
|
||||||
|
headers = {"Authorization": f"Bearer {test_user_token}"}
|
||||||
|
request_data = {
|
||||||
|
"job_description": "Test job",
|
||||||
|
"company_name": "Error Corp",
|
||||||
|
"role_title": "Test Role"
|
||||||
|
}
|
||||||
|
|
||||||
|
response = test_client.post(
|
||||||
|
"/api/ai/generate-cover-letter",
|
||||||
|
json=request_data,
|
||||||
|
headers=headers
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 500
|
||||||
|
data = response.json()
|
||||||
|
assert "Failed to generate cover letter" in data["detail"]
|
||||||
|
|
||||||
|
@patch('src.backend.services.ai_service.ai_service.generate_resume_optimization')
|
||||||
|
def test_resume_optimization_service_error(self, mock_optimize, test_client, test_user_token):
|
||||||
|
"""Test resume optimization when AI service fails."""
|
||||||
|
mock_optimize.side_effect = Exception("Service error")
|
||||||
|
|
||||||
|
headers = {"Authorization": f"Bearer {test_user_token}"}
|
||||||
|
request_data = {
|
||||||
|
"current_resume": "Test resume",
|
||||||
|
"job_description": "Test job",
|
||||||
|
"role_title": "Test role"
|
||||||
|
}
|
||||||
|
|
||||||
|
response = test_client.post(
|
||||||
|
"/api/ai/optimize-resume",
|
||||||
|
json=request_data,
|
||||||
|
headers=headers
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 500
|
||||||
|
data = response.json()
|
||||||
|
assert "Failed to optimize resume" in data["detail"]
|
||||||
|
|
||||||
|
def test_cover_letter_with_large_payload(self, test_client, test_user_token):
|
||||||
|
"""Test cover letter generation with very large job description."""
|
||||||
|
headers = {"Authorization": f"Bearer {test_user_token}"}
|
||||||
|
|
||||||
|
# Create a very large job description
|
||||||
|
large_description = "A" * 50000 # 50KB of text
|
||||||
|
|
||||||
|
request_data = {
|
||||||
|
"job_description": large_description,
|
||||||
|
"company_name": "Large Corp",
|
||||||
|
"role_title": "Big Role"
|
||||||
|
}
|
||||||
|
|
||||||
|
response = test_client.post(
|
||||||
|
"/api/ai/generate-cover-letter",
|
||||||
|
json=request_data,
|
||||||
|
headers=headers
|
||||||
|
)
|
||||||
|
|
||||||
|
# Should handle large payloads gracefully
|
||||||
|
assert response.status_code in [200, 413, 422] # Success or payload too large
|
||||||
|
|
||||||
|
def test_resume_optimization_empty_resume(self, test_client, test_user_token):
|
||||||
|
"""Test resume optimization with empty resume."""
|
||||||
|
headers = {"Authorization": f"Bearer {test_user_token}"}
|
||||||
|
request_data = {
|
||||||
|
"current_resume": "",
|
||||||
|
"job_description": "Test job description",
|
||||||
|
"role_title": "Test Role"
|
||||||
|
}
|
||||||
|
|
||||||
|
response = test_client.post(
|
||||||
|
"/api/ai/optimize-resume",
|
||||||
|
json=request_data,
|
||||||
|
headers=headers
|
||||||
|
)
|
||||||
|
|
||||||
|
# Should handle empty resume
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert "content" in data
|
||||||
|
|
||||||
|
|
||||||
|
class TestAIAPIValidation:
    """Test input validation for AI API endpoints."""

    def test_cover_letter_invalid_email_in_description(self, test_client, test_user_token):
        """Test cover letter generation with invalid characters."""
        headers = {"Authorization": f"Bearer {test_user_token}"}
        request_data = {
            "job_description": "Job with special chars: <script>alert('xss')</script>",
            "company_name": "Security Corp",
            "role_title": "Security Engineer"
        }

        response = test_client.post(
            "/api/ai/generate-cover-letter",
            json=request_data,
            headers=headers
        )

        # Should sanitize or handle special characters
        assert response.status_code == 200
        data = response.json()
        # The script tag should not be executed (this is handled by the template)
        assert "Security Corp" in data["content"]

    def test_resume_optimization_unicode_content(self, test_client, test_user_token):
        """Test resume optimization with unicode characters."""
        headers = {"Authorization": f"Bearer {test_user_token}"}
        request_data = {
            "current_resume": "José González\nSoftware Engineer\n• 5 años de experiencia",
            "job_description": "Seeking bilingual developer",
            "role_title": "Desarrollador Senior"
        }

        response = test_client.post(
            "/api/ai/optimize-resume",
            json=request_data,
            headers=headers
        )

        assert response.status_code == 200
        data = response.json()
        assert "José González" in data["content"]

    def test_cover_letter_null_values(self, test_client, test_user_token):
        """Test cover letter generation with null values in optional fields."""
        headers = {"Authorization": f"Bearer {test_user_token}"}
        request_data = {
            "job_description": "Test job description",
            "company_name": "Null Corp",
            "role_title": "Null Role",
            "job_url": None,
            "user_resume": None
        }

        response = test_client.post(
            "/api/ai/generate-cover-letter",
            json=request_data,
            headers=headers
        )

        assert response.status_code == 200
        data = response.json()
        assert "Null Corp" in data["content"]

class TestAIAPIPerformance:
    """Test performance aspects of AI API endpoints."""

    def test_concurrent_cover_letter_requests(self, test_client, test_user_token):
        """Test multiple concurrent cover letter requests."""
        import threading
        import time

        headers = {"Authorization": f"Bearer {test_user_token}"}

        def make_request(index):
            request_data = {
                "job_description": f"Job description {index}",
                "company_name": f"Company {index}",
                "role_title": f"Role {index}"
            }
            return test_client.post(
                "/api/ai/generate-cover-letter",
                json=request_data,
                headers=headers
            )

        # Make 5 concurrent requests
        start_time = time.time()
        threads = []
        results = []

        for i in range(5):
            thread = threading.Thread(target=lambda i=i: results.append(make_request(i)))
            threads.append(thread)
            thread.start()

        for thread in threads:
            thread.join()

        end_time = time.time()

        # All requests should succeed
        assert len(results) == 5
        for response in results:
            assert response.status_code == 200

        # Should complete in reasonable time (less than 10 seconds for template generation)
        assert end_time - start_time < 10

    def test_response_time_cover_letter(self, test_client, test_user_token):
        """Test response time for cover letter generation."""
        import time

        headers = {"Authorization": f"Bearer {test_user_token}"}
        request_data = {
            "job_description": "Standard Python developer position",
            "company_name": "Performance Corp",
            "role_title": "Python Developer"
        }

        start_time = time.time()
        response = test_client.post(
            "/api/ai/generate-cover-letter",
            json=request_data,
            headers=headers
        )
        end_time = time.time()

        assert response.status_code == 200

        # Template generation should be fast (less than 1 second)
        response_time = end_time - start_time
        assert response_time < 1.0

@pytest.mark.asyncio
class TestAIAPIAsync:
    """Test AI API endpoints with async client."""

    async def test_async_cover_letter_generation(self, async_client, test_user_token):
        """Test cover letter generation with async client."""
        headers = {"Authorization": f"Bearer {test_user_token}"}
        request_data = {
            "job_description": "Async job description",
            "company_name": "Async Corp",
            "role_title": "Async Developer"
        }

        response = await async_client.post(
            "/api/ai/generate-cover-letter",
            json=request_data,
            headers=headers
        )

        assert response.status_code == 200
        data = response.json()
        assert "Async Corp" in data["content"]

    async def test_async_resume_optimization(self, async_client, test_user_token):
        """Test resume optimization with async client."""
        headers = {"Authorization": f"Bearer {test_user_token}"}
        request_data = {
            "current_resume": "Async resume content",
            "job_description": "Async job requirements",
            "role_title": "Async Role"
        }

        response = await async_client.post(
            "/api/ai/optimize-resume",
            json=request_data,
            headers=headers
        )

        assert response.status_code == 200
        data = response.json()
        assert "Async resume content" in data["content"]
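Note: the fixtures used throughout these endpoint tests (test_client, async_client, test_user_token, test_user, test_db) are defined elsewhere in the suite, presumably in tests/conftest.py, which is not part of this diff. The sketch below shows one plausible wiring, assuming FastAPI's TestClient, httpx's ASGITransport, and the token helper from the auth module; the fixture names come from the tests above, everything else is an assumption rather than the project's actual conftest.

# Hypothetical tests/conftest.py sketch (not part of this commit)
import pytest
from httpx import ASGITransport, AsyncClient
from fastapi.testclient import TestClient

from src.backend.main import app
from src.backend.api.auth import create_access_token


@pytest.fixture
def test_client():
    # Synchronous client used by most endpoint tests
    return TestClient(app)


@pytest.fixture
async def async_client():
    # Async client used by the TestAIAPIAsync tests
    async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client:
        yield client


@pytest.fixture
def test_user_token(test_user):
    # JWT for the seeded test user; assumes the create_access_token helper shown in the auth tests
    return create_access_token({"sub": str(test_user.id), "email": test_user.email})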
1  tests/unit/__init__.py  Normal file
@@ -0,0 +1 @@
# Unit tests package
405  tests/unit/test_ai_service.py  Normal file
@@ -0,0 +1,405 @@
"""
Unit tests for AI document generation service
"""
import pytest
from unittest.mock import AsyncMock, Mock, patch
import asyncio

from src.backend.services.ai_service import AIService, ai_service

class TestAIService:
    """Test AI Service functionality."""

    def test_ai_service_initialization(self):
        """Test AI service initializes correctly."""
        service = AIService()

        # Without API keys, clients should be None
        assert service.claude_client is None
        assert service.openai_client is None

    @patch('src.backend.services.ai_service.settings')
    def test_ai_service_with_claude_key(self, mock_settings):
        """Test AI service initialization with Claude API key."""
        mock_settings.claude_api_key = "test-claude-key"
        mock_settings.openai_api_key = None

        with patch('src.backend.services.ai_service.anthropic.Anthropic') as mock_anthropic:
            service = AIService()

            mock_anthropic.assert_called_once_with(api_key="test-claude-key")
            assert service.claude_client is not None
            assert service.openai_client is None

    @patch('src.backend.services.ai_service.settings')
    def test_ai_service_with_openai_key(self, mock_settings):
        """Test AI service initialization with OpenAI API key."""
        mock_settings.claude_api_key = None
        mock_settings.openai_api_key = "test-openai-key"

        with patch('src.backend.services.ai_service.openai.AsyncOpenAI') as mock_openai:
            service = AIService()

            mock_openai.assert_called_once_with(api_key="test-openai-key")
            assert service.claude_client is None
            assert service.openai_client is not None

    @pytest.mark.asyncio
    async def test_generate_cover_letter_template_fallback(self):
        """Test cover letter generation with template fallback."""
        service = AIService()

        result = await service.generate_cover_letter(
            job_description="Python developer position requiring FastAPI skills",
            company_name="Tech Corp",
            role_title="Senior Python Developer",
            user_name="John Doe"
        )

        assert "content" in result
        assert "model_used" in result
        assert "prompt" in result

        assert result["model_used"] == "template"
        assert "Tech Corp" in result["content"]
        assert "Senior Python Developer" in result["content"]
        assert "John Doe" in result["content"]
        assert "Dear Hiring Manager" in result["content"]

    @pytest.mark.asyncio
    async def test_generate_cover_letter_with_claude(self):
        """Test cover letter generation with Claude API."""
        service = AIService()

        # Mock Claude client
        mock_claude = Mock()
        mock_response = Mock()
        mock_response.content = [Mock(text="Generated cover letter content")]
        mock_claude.messages.create.return_value = mock_response
        service.claude_client = mock_claude

        result = await service.generate_cover_letter(
            job_description="Python developer position",
            company_name="Test Company",
            role_title="Developer",
            user_name="Test User"
        )

        assert result["content"] == "Generated cover letter content"
        assert result["model_used"] == "claude-3-haiku"
        assert "prompt" in result

        # Verify Claude API was called correctly
        mock_claude.messages.create.assert_called_once()
        call_args = mock_claude.messages.create.call_args
        assert call_args[1]["model"] == "claude-3-haiku-20240307"
        assert call_args[1]["max_tokens"] == 1000

    @pytest.mark.asyncio
    async def test_generate_cover_letter_with_openai(self):
        """Test cover letter generation with OpenAI API."""
        service = AIService()

        # Mock OpenAI client
        mock_openai = AsyncMock()
        mock_response = Mock()
        mock_response.choices = [Mock(message=Mock(content="OpenAI generated content"))]
        mock_openai.chat.completions.create.return_value = mock_response
        service.openai_client = mock_openai

        result = await service.generate_cover_letter(
            job_description="Software engineer role",
            company_name="OpenAI Corp",
            role_title="Engineer",
            user_name="AI User"
        )

        assert result["content"] == "OpenAI generated content"
        assert result["model_used"] == "gpt-3.5-turbo"
        assert "prompt" in result

        # Verify OpenAI API was called correctly
        mock_openai.chat.completions.create.assert_called_once()
        call_args = mock_openai.chat.completions.create.call_args
        assert call_args[1]["model"] == "gpt-3.5-turbo"
        assert call_args[1]["max_tokens"] == 1000

    @pytest.mark.asyncio
    async def test_generate_cover_letter_with_user_resume(self):
        """Test cover letter generation with user resume included."""
        service = AIService()

        result = await service.generate_cover_letter(
            job_description="Python developer position",
            company_name="Resume Corp",
            role_title="Developer",
            user_name="Resume User",
            user_resume="John Doe\nSoftware Engineer\n5 years Python experience"
        )

        # Should include resume information in prompt
        assert "Resume/Background" in result["prompt"]
        assert result["model_used"] == "template"

    @pytest.mark.asyncio
    async def test_generate_resume_optimization_template(self):
        """Test resume optimization with template fallback."""
        service = AIService()

        current_resume = "John Smith\nDeveloper\n\nExperience:\n- 3 years Python\n- Web development"

        result = await service.generate_resume_optimization(
            current_resume=current_resume,
            job_description="Senior Python Developer requiring FastAPI",
            role_title="Senior Python Developer"
        )

        assert "content" in result
        assert "model_used" in result
        assert "prompt" in result

        assert result["model_used"] == "template"
        assert "Senior Python Developer" in result["content"]
        assert current_resume in result["content"]

    @pytest.mark.asyncio
    async def test_generate_resume_optimization_with_ai_error(self):
        """Test resume optimization when AI service fails."""
        service = AIService()

        # Mock Claude client that raises an exception
        mock_claude = Mock()
        mock_claude.messages.create.side_effect = Exception("API Error")
        service.claude_client = mock_claude

        result = await service.generate_resume_optimization(
            current_resume="Test resume",
            job_description="Test job",
            role_title="Test role"
        )

        # Should fall back to template
        assert result["model_used"] == "template-fallback"
        assert "Test resume" in result["content"]

    def test_template_cover_letter_generation(self):
        """Test template cover letter generation."""
        service = AIService()

        content = service._generate_template_cover_letter(
            company_name="Template Corp",
            role_title="Template Role",
            user_name="Template User",
            job_description="Python, JavaScript, React, SQL, AWS, Docker experience required"
        )

        assert "Template Corp" in content
        assert "Template Role" in content
        assert "Template User" in content
        assert "Dear Hiring Manager" in content

        # Should extract and include relevant skills
        assert "Python" in content or "Javascript" in content

    def test_template_cover_letter_no_matching_skills(self):
        """Test template cover letter when no skills match."""
        service = AIService()

        content = service._generate_template_cover_letter(
            company_name="No Skills Corp",
            role_title="Mysterious Role",
            user_name="Skill-less User",
            job_description="Experience with proprietary technology XYZ required"
        )

        assert "No Skills Corp" in content
        assert "Mysterious Role" in content
        assert "Skill-less User" in content
        # Should not include skill text when no matches
        assert "with expertise in" not in content

class TestAIServiceIntegration:
    """Test AI service integration and edge cases."""

    @pytest.mark.asyncio
    async def test_concurrent_cover_letter_generation(self):
        """Test concurrent cover letter generation requests."""
        service = AIService()

        # Create multiple concurrent requests
        tasks = [
            service.generate_cover_letter(
                job_description=f"Job {i} description",
                company_name=f"Company {i}",
                role_title=f"Role {i}",
                user_name=f"User {i}"
            )
            for i in range(5)
        ]

        results = await asyncio.gather(*tasks)

        # All should complete successfully
        assert len(results) == 5
        for i, result in enumerate(results):
            assert f"Company {i}" in result["content"]
            assert f"Role {i}" in result["content"]
            assert result["model_used"] == "template"

    @pytest.mark.asyncio
    async def test_cover_letter_with_empty_inputs(self):
        """Test cover letter generation with empty inputs."""
        service = AIService()

        result = await service.generate_cover_letter(
            job_description="",
            company_name="",
            role_title="",
            user_name=""
        )

        # Should handle empty inputs gracefully
        assert "content" in result
        assert result["model_used"] == "template"

    @pytest.mark.asyncio
    async def test_cover_letter_with_very_long_inputs(self):
        """Test cover letter generation with very long inputs."""
        service = AIService()

        long_description = "A" * 10000  # Very long job description

        result = await service.generate_cover_letter(
            job_description=long_description,
            company_name="Long Corp",
            role_title="Long Role",
            user_name="Long User"
        )

        # Should handle long inputs
        assert "content" in result
        assert result["model_used"] == "template"

    @pytest.mark.asyncio
    async def test_resume_optimization_with_special_characters(self):
        """Test resume optimization with special characters."""
        service = AIService()

        resume_with_special_chars = """
        José González
        Software Engineer

        Experience:
        • 5 years of Python development
        • Expertise in FastAPI & PostgreSQL
        • Led team of 10+ developers
        """

        result = await service.generate_resume_optimization(
            current_resume=resume_with_special_chars,
            job_description="Senior role requiring team leadership",
            role_title="Senior Developer"
        )

        assert "content" in result
        assert "José González" in result["content"]
        assert result["model_used"] == "template"

class TestAIServiceConfiguration:
    """Test AI service configuration and settings."""

    @patch('src.backend.services.ai_service.settings')
    def test_ai_service_singleton(self, mock_settings):
        """Test that ai_service is a singleton instance."""
        # The ai_service should be the same instance
        from src.backend.services.ai_service import ai_service as service1
        from src.backend.services.ai_service import ai_service as service2

        assert service1 is service2

    @pytest.mark.asyncio
    async def test_error_handling_in_ai_generation(self):
        """Test error handling in AI generation methods."""
        service = AIService()

        # Mock a client that raises an exception
        service.claude_client = Mock()
        service.claude_client.messages.create.side_effect = Exception("Network error")

        result = await service.generate_cover_letter(
            job_description="Test job",
            company_name="Error Corp",
            role_title="Error Role",
            user_name="Error User"
        )

        # Should fall back gracefully
        assert result["model_used"] == "template-fallback"
        assert "Error Corp" in result["content"]

    def test_prompt_construction(self):
        """Test that prompts are constructed correctly."""
        service = AIService()

        # This is tested indirectly through the template generation
        content = service._generate_template_cover_letter(
            company_name="Prompt Corp",
            role_title="Prompt Engineer",
            user_name="Prompt User",
            job_description="Looking for someone with strong prompting skills"
        )

        assert "Prompt Corp" in content
        assert "Prompt Engineer" in content
        assert "Prompt User" in content

@pytest.mark.integration
class TestAIServiceWithRealAPIs:
    """Integration tests for AI service with real APIs (requires API keys)."""

    @pytest.mark.skipif(
        not hasattr(ai_service, 'claude_client') or ai_service.claude_client is None,
        reason="Claude API key not configured"
    )
    @pytest.mark.asyncio
    async def test_real_claude_api_call(self):
        """Test actual Claude API call (only runs if API key is configured)."""
        result = await ai_service.generate_cover_letter(
            job_description="Python developer position with FastAPI",
            company_name="Real API Corp",
            role_title="Python Developer",
            user_name="Integration Test User"
        )

        assert result["model_used"] == "claude-3-haiku"
        assert len(result["content"]) > 100  # Should be substantial content
        assert "Real API Corp" in result["content"]

    @pytest.mark.skipif(
        not hasattr(ai_service, 'openai_client') or ai_service.openai_client is None,
        reason="OpenAI API key not configured"
    )
    @pytest.mark.asyncio
    async def test_real_openai_api_call(self):
        """Test actual OpenAI API call (only runs if API key is configured)."""
        # Temporarily disable Claude to force OpenAI usage
        original_claude = ai_service.claude_client
        ai_service.claude_client = None

        try:
            result = await ai_service.generate_cover_letter(
                job_description="Software engineer role requiring Python",
                company_name="OpenAI Test Corp",
                role_title="Software Engineer",
                user_name="OpenAI Test User"
            )

            assert result["model_used"] == "gpt-3.5-turbo"
            assert len(result["content"]) > 100
            assert "OpenAI Test Corp" in result["content"]
        finally:
            ai_service.claude_client = original_claude
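Note: taken together, the unit tests above pin down a provider-selection order for AIService: Claude ("claude-3-haiku") is tried first when configured, OpenAI ("gpt-3.5-turbo") when only an OpenAI key is present, and a local template otherwise, with "template-fallback" recorded when a provider call raises. The sketch below only illustrates that expected behaviour; it is not the actual src/backend/services/ai_service.py code, and the _template helper is a hypothetical stand-in.

# Illustrative sketch of the fallback order the tests assert (not the real service code)
async def generate_cover_letter_sketch(service, prompt: str) -> dict:
    def _template() -> str:
        # Hypothetical stand-in for the template generator used as the final fallback
        return f"Dear Hiring Manager,\n\n{prompt}"

    if service.claude_client is not None:
        try:
            resp = service.claude_client.messages.create(
                model="claude-3-haiku-20240307",
                max_tokens=1000,
                messages=[{"role": "user", "content": prompt}],
            )
            return {"content": resp.content[0].text, "model_used": "claude-3-haiku", "prompt": prompt}
        except Exception:
            # Any provider error falls back to the template, flagged as "template-fallback"
            return {"content": _template(), "model_used": "template-fallback", "prompt": prompt}

    if service.openai_client is not None:
        try:
            resp = await service.openai_client.chat.completions.create(
                model="gpt-3.5-turbo",
                max_tokens=1000,
                messages=[{"role": "user", "content": prompt}],
            )
            return {"content": resp.choices[0].message.content, "model_used": "gpt-3.5-turbo", "prompt": prompt}
        except Exception:
            return {"content": _template(), "model_used": "template-fallback", "prompt": prompt}

    # No API keys configured: use the local template path
    return {"content": _template(), "model_used": "template", "prompt": prompt}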
375  tests/unit/test_auth_endpoints.py  Normal file
@@ -0,0 +1,375 @@
"""
Unit tests for authentication endpoints
"""
import pytest
from fastapi.testclient import TestClient
from unittest.mock import patch
import uuid

from src.backend.main import app
from src.backend.models.user import User
from src.backend.api.auth import hash_password, verify_password, create_access_token

class TestAuthenticationAPI:
    """Test authentication API endpoints."""

    def test_register_user_success(self, test_client, test_db):
        """Test successful user registration."""
        user_data = {
            "email": "newuser@test.com",
            "password": "securepassword123",
            "first_name": "New",
            "last_name": "User"
        }

        response = test_client.post("/api/auth/register", json=user_data)

        assert response.status_code == 200
        data = response.json()
        assert data["email"] == "newuser@test.com"
        assert data["full_name"] == "New User"
        assert data["first_name"] == "New"
        assert data["last_name"] == "User"
        assert data["is_active"] == True
        assert "id" in data
        # Password should never be returned
        assert "password" not in data
        assert "password_hash" not in data

    def test_register_user_duplicate_email(self, test_client, test_user):
        """Test registration with duplicate email fails."""
        user_data = {
            "email": test_user.email,  # Use existing user's email
            "password": "differentpassword",
            "first_name": "Duplicate",
            "last_name": "User"
        }

        response = test_client.post("/api/auth/register", json=user_data)

        assert response.status_code == 400
        data = response.json()
        assert "already registered" in data["detail"].lower()

    def test_register_user_invalid_email(self, test_client):
        """Test registration with invalid email format."""
        user_data = {
            "email": "invalid-email-format",
            "password": "securepassword123",
            "first_name": "Invalid",
            "last_name": "Email"
        }

        response = test_client.post("/api/auth/register", json=user_data)

        assert response.status_code == 422  # Validation error

    def test_register_user_missing_fields(self, test_client):
        """Test registration with missing required fields."""
        user_data = {
            "email": "incomplete@test.com",
            # Missing password, first_name, last_name
        }

        response = test_client.post("/api/auth/register", json=user_data)

        assert response.status_code == 422

    def test_login_success(self, test_client, test_user):
        """Test successful login."""
        login_data = {
            "email": test_user.email,
            "password": "testpassword123"
        }

        response = test_client.post("/api/auth/login", json=login_data)

        assert response.status_code == 200
        data = response.json()
        assert "access_token" in data
        assert "token_type" in data
        assert data["token_type"] == "bearer"

        # Verify token structure
        token = data["access_token"]
        assert len(token.split('.')) == 3  # JWT has 3 parts

    def test_login_wrong_password(self, test_client, test_user):
        """Test login with incorrect password."""
        login_data = {
            "email": test_user.email,
            "password": "wrongpassword"
        }

        response = test_client.post("/api/auth/login", json=login_data)

        assert response.status_code == 401
        data = response.json()
        assert "incorrect" in data["detail"].lower()

    def test_login_nonexistent_user(self, test_client):
        """Test login with non-existent user."""
        login_data = {
            "email": "nonexistent@test.com",
            "password": "somepassword"
        }

        response = test_client.post("/api/auth/login", json=login_data)

        assert response.status_code == 401

    def test_login_invalid_email_format(self, test_client):
        """Test login with invalid email format."""
        login_data = {
            "email": "not-an-email",
            "password": "somepassword"
        }

        response = test_client.post("/api/auth/login", json=login_data)

        assert response.status_code == 422  # Validation error

    def test_get_current_user_success(self, test_client, test_user_token):
        """Test getting current user with valid token."""
        headers = {"Authorization": f"Bearer {test_user_token}"}
        response = test_client.get("/api/auth/me", headers=headers)

        assert response.status_code == 200
        data = response.json()
        assert "email" in data
        assert "id" in data
        assert "full_name" in data
        assert "is_active" in data
        # Ensure sensitive data is not returned
        assert "password" not in data
        assert "password_hash" not in data

    def test_get_current_user_invalid_token(self, test_client):
        """Test getting current user with invalid token."""
        headers = {"Authorization": "Bearer invalid.token.here"}
        response = test_client.get("/api/auth/me", headers=headers)

        assert response.status_code == 401

    def test_get_current_user_no_token(self, test_client):
        """Test getting current user without authorization header."""
        response = test_client.get("/api/auth/me")

        assert response.status_code == 403  # FastAPI HTTPBearer returns 403

    def test_get_current_user_malformed_header(self, test_client):
        """Test getting current user with malformed authorization header."""
        malformed_headers = [
            {"Authorization": "Bearer"},
            {"Authorization": "NotBearer validtoken"},
            {"Authorization": "Bearer "},
            {"Authorization": "invalid-format"}
        ]

        for headers in malformed_headers:
            response = test_client.get("/api/auth/me", headers=headers)
            assert response.status_code in [401, 403]

class TestPasswordUtilities:
    """Test password hashing and verification utilities."""

    def test_hash_password(self):
        """Test password hashing function."""
        password = "testpassword123"
        hashed = hash_password(password)

        assert hashed != password  # Should be hashed
        assert len(hashed) > 0
        assert hashed.startswith('$2b$')  # bcrypt format

    def test_verify_password_correct(self):
        """Test password verification with correct password."""
        password = "testpassword123"
        hashed = hash_password(password)

        assert verify_password(password, hashed) == True

    def test_verify_password_incorrect(self):
        """Test password verification with incorrect password."""
        password = "testpassword123"
        wrong_password = "wrongpassword"
        hashed = hash_password(password)

        assert verify_password(wrong_password, hashed) == False

    def test_hash_different_passwords_different_hashes(self):
        """Test that different passwords produce different hashes."""
        password1 = "password123"
        password2 = "password456"

        hash1 = hash_password(password1)
        hash2 = hash_password(password2)

        assert hash1 != hash2

    def test_hash_same_password_different_hashes(self):
        """Test that the same password produces different hashes (salt)."""
        password = "testpassword123"

        hash1 = hash_password(password)
        hash2 = hash_password(password)

        assert hash1 != hash2  # Should be different due to salt
        # But both should verify correctly
        assert verify_password(password, hash1) == True
        assert verify_password(password, hash2) == True

class TestJWTTokens:
    """Test JWT token creation and validation."""

    def test_create_access_token(self):
        """Test JWT token creation."""
        data = {"sub": str(uuid.uuid4()), "email": "test@example.com"}
        token = create_access_token(data)

        assert isinstance(token, str)
        assert len(token.split('.')) == 3  # JWT format: header.payload.signature

    def test_create_token_with_different_data(self):
        """Test that different data creates different tokens."""
        data1 = {"sub": str(uuid.uuid4()), "email": "user1@example.com"}
        data2 = {"sub": str(uuid.uuid4()), "email": "user2@example.com"}

        token1 = create_access_token(data1)
        token2 = create_access_token(data2)

        assert token1 != token2

    def test_token_contains_expiration(self):
        """Test that created tokens contain expiration claim."""
        from jose import jwt
        from src.backend.core.config import settings

        data = {"sub": str(uuid.uuid4())}
        token = create_access_token(data)

        # Decode without verification to check claims
        decoded = jwt.get_unverified_claims(token)
        assert "exp" in decoded
        assert "sub" in decoded

class TestUserModel:
    """Test User model properties and methods."""

    def test_user_full_name_property(self):
        """Test that full_name property works correctly."""
        user = User(
            email="test@example.com",
            password_hash="hashed_password",
            full_name="John Doe"
        )

        assert user.full_name == "John Doe"
        assert user.first_name == "John"
        assert user.last_name == "Doe"

    def test_user_single_name(self):
        """Test user with single name."""
        user = User(
            email="test@example.com",
            password_hash="hashed_password",
            full_name="Madonna"
        )

        assert user.full_name == "Madonna"
        assert user.first_name == "Madonna"
        assert user.last_name == ""

    def test_user_multiple_last_names(self):
        """Test user with multiple last names."""
        user = User(
            email="test@example.com",
            password_hash="hashed_password",
            full_name="John van der Berg"
        )

        assert user.full_name == "John van der Berg"
        assert user.first_name == "John"
        assert user.last_name == "van der Berg"

    def test_user_is_active_property(self):
        """Test user is_active property."""
        user = User(
            email="test@example.com",
            password_hash="hashed_password",
            full_name="Test User"
        )

        assert user.is_active == True  # Default is True

class TestAuthenticationEdgeCases:
    """Test edge cases and error conditions."""

    def test_register_empty_names(self, test_client):
        """Test registration with empty names."""
        user_data = {
            "email": "empty@test.com",
            "password": "password123",
            "first_name": "",
            "last_name": ""
        }

        response = test_client.post("/api/auth/register", json=user_data)

        # Should still work but create empty full_name
        assert response.status_code == 200
        data = response.json()
        assert data["full_name"] == " "  # Space between empty names

    def test_register_very_long_email(self, test_client):
        """Test registration with very long email."""
        long_email = "a" * 250 + "@test.com"  # Very long email
        user_data = {
            "email": long_email,
            "password": "password123",
            "first_name": "Long",
            "last_name": "Email"
        }

        response = test_client.post("/api/auth/register", json=user_data)

        # Should handle long emails (within DB constraints)
        if len(long_email) <= 255:
            assert response.status_code == 200
        else:
            assert response.status_code in [400, 422]

    def test_register_unicode_names(self, test_client):
        """Test registration with unicode characters in names."""
        user_data = {
            "email": "unicode@test.com",
            "password": "password123",
            "first_name": "José",
            "last_name": "González"
        }

        response = test_client.post("/api/auth/register", json=user_data)

        assert response.status_code == 200
        data = response.json()
        assert data["full_name"] == "José González"
        assert data["first_name"] == "José"
        assert data["last_name"] == "González"

    def test_case_insensitive_email_login(self, test_client, test_user):
        """Test that email login is case insensitive."""
        # Try login with different case
        login_data = {
            "email": test_user.email.upper(),
            "password": "testpassword123"
        }

        response = test_client.post("/api/auth/login", json=login_data)

        # This might fail if email comparison is case-sensitive
        # The actual behavior depends on implementation
        assert response.status_code in [200, 401]
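Note: the helpers exercised by TestPasswordUtilities and TestJWTTokens (hash_password, verify_password, create_access_token) live in src/backend/api/auth.py, which is not shown in this diff. A minimal sketch consistent with the assertions above (bcrypt "$2b$" hashes, salted hashing, a JWT carrying an "exp" claim) could look like the following; the passlib/python-jose usage and the settings values are assumptions, not the project's actual code.

# Hypothetical sketch of the auth helpers these tests exercise (not part of this commit)
from datetime import datetime, timedelta, timezone

from jose import jwt
from passlib.context import CryptContext

pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

# Assumed settings values; the project reads these from src.backend.core.config
SECRET_KEY = "change-me"
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 30


def hash_password(password: str) -> str:
    # bcrypt salts each hash, so the same password hashes differently every call
    return pwd_context.hash(password)


def verify_password(plain_password: str, password_hash: str) -> bool:
    return pwd_context.verify(plain_password, password_hash)


def create_access_token(data: dict) -> str:
    # Copy the claims and add an expiration so "exp" is always present in the token
    to_encode = dict(data)
    to_encode["exp"] = datetime.now(timezone.utc) + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
    return jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)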