Fix: Rename directory to remove & character causing shell issues
Renamed ebook_backend&admin_panel to ebook_backend_admin_panel. The & character was being interpreted by the shell as the background-process operator, which caused 'Dockerfile not found' errors in Coolify.
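For illustration, a minimal sketch of the failure mode (the command and path below are hypothetical stand-ins, not the actual Coolify build invocation): when a string containing an unquoted & is handed to a shell, the shell splits the command at that character.

    # Illustrative only: shows why an unquoted '&' in a path breaks a shell-invoked command.
    import subprocess

    cmd = "ls ebook_backend&admin_panel/Dockerfile"
    # With shell=True, /bin/sh treats '&' as the background-process operator:
    # it runs `ls ebook_backend` in the background, then tries to execute
    # `admin_panel/Dockerfile` as a separate command, which fails.
    result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
    print(result.stderr)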
253
ebook_backend_admin_panel/admin-backend/init_db.py
Normal file
@@ -0,0 +1,253 @@
"""
Database Initialization Script

This script automatically initializes the database on application startup:
- Creates all required tables if they don't exist
- Creates default admin user if no admin exists
- Runs automatically when the application starts
- Safe to run multiple times (idempotent)

Usage:
    This file is automatically called from main.py lifespan event.
    No manual execution required.
"""

import os
import logging
from sqlalchemy.orm import Session
from sqlalchemy.exc import IntegrityError
from dotenv import load_dotenv

from utils.auth import engine, SessionLocal, Base, hash_password
from models.user import AdminUser
from models.coupon import Coupon

# Load environment variables
load_dotenv()

# Setup logger
logger = logging.getLogger(__name__)


def create_tables():
    """
    Create all database tables if they don't exist.

    This function creates tables for:
    - AdminUser (admin_users table)
    - Coupon (coupon_codes table)

    Returns:
        bool: True if successful, False otherwise
    """
    try:
        # Import all models to ensure they're registered with Base
        from models.user import AdminUser
        from models.coupon import Coupon

        # Create all tables
        Base.metadata.create_all(bind=engine)
        logger.info("✅ Database tables created/verified successfully")
        return True

    except Exception as e:
        logger.error(f"❌ Error creating database tables: {e}", exc_info=True)
        return False


def create_default_admin(db: Session) -> bool:
    """
    Create default admin user if no admin exists in the database.

    Reads credentials from environment variables:
    - ADMIN_USERNAME (default: 'admin')
    - ADMIN_PASSWORD (default: 'admin123')

    Args:
        db (Session): Database session

    Returns:
        bool: True if admin was created or already exists, False on error
    """
    try:
        # Check if any admin user exists
        existing_admin = db.query(AdminUser).first()

        if existing_admin:
            logger.info(f"ℹ️ Admin user already exists: {existing_admin.username}")
            return True

        # Get admin credentials from environment variables
        admin_username = os.getenv("ADMIN_USERNAME", "admin")
        admin_password = os.getenv("ADMIN_PASSWORD", "admin123")

        # Validate credentials
        if not admin_username or not admin_password:
            logger.error("❌ ADMIN_USERNAME or ADMIN_PASSWORD not set in environment variables")
            return False

        # Hash the password
        password_hash = hash_password(admin_password)

        # Create admin user
        admin_user = AdminUser(
            username=admin_username,
            password_hash=password_hash
        )

        db.add(admin_user)
        db.commit()
        db.refresh(admin_user)

        logger.info(f"✅ Default admin user created successfully: {admin_username}")
        logger.warning("⚠️ Please change the default admin password in production!")

        return True

    except IntegrityError as e:
        db.rollback()
        logger.warning(f"⚠️ Admin user might already exist: {e}")
        return True  # Not a critical error, admin might exist

    except Exception as e:
        db.rollback()
        logger.error(f"❌ Error creating default admin user: {e}", exc_info=True)
        return False


def initialize_database():
    """
    Main initialization function that orchestrates database setup.

    This function:
    1. Creates all required database tables
    2. Creates default admin user if none exists
    3. Logs all operations for monitoring

    Returns:
        bool: True if initialization successful, False otherwise

    Raises:
        Exception: If critical initialization fails
    """
    logger.info("🚀 Starting database initialization...")

    # Step 1: Create tables
    if not create_tables():
        logger.error("❌ Failed to create database tables")
        raise Exception("Database table creation failed")

    # Step 2: Create default admin user
    db = SessionLocal()
    try:
        if not create_default_admin(db):
            logger.warning("⚠️ Failed to create default admin user")
            # Don't raise exception, app can still run

        logger.info("✅ Database initialization completed successfully")
        return True

    except Exception as e:
        logger.error(f"❌ Database initialization failed: {e}", exc_info=True)
        raise

    finally:
        db.close()


def verify_database_connection():
    """
    Verify that database connection is working.

    Returns:
        bool: True if connection successful, False otherwise
    """
    try:
        from sqlalchemy import text
        db = SessionLocal()
        db.execute(text("SELECT 1"))
        db.close()
        logger.info("✅ Database connection verified")
        return True

    except Exception as e:
        logger.error(f"❌ Database connection failed: {e}", exc_info=True)
        return False


def get_admin_stats(db: Session) -> dict:
    """
    Get statistics about the database for logging purposes.

    Args:
        db (Session): Database session

    Returns:
        dict: Statistics including admin count, coupon count, etc.
    """
    try:
        admin_count = db.query(AdminUser).count()
        coupon_count = db.query(Coupon).count()

        return {
            "admin_users": admin_count,
            "total_coupons": coupon_count,
            "database_healthy": True
        }

    except Exception as e:
        logger.error(f"Error getting database stats: {e}")
        return {
            "database_healthy": False,
            "error": str(e)
        }


if __name__ == "__main__":
    """
    Allow manual execution for testing purposes.

    Usage:
        python init_db.py
    """
    # Setup basic logging for standalone execution
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    )

    print("=" * 60)
    print("DATABASE INITIALIZATION SCRIPT")
    print("=" * 60)
    print()

    # Verify connection
    if not verify_database_connection():
        print("❌ Cannot connect to database. Please check your DATABASE_URL")
        exit(1)

    # Initialize database
    try:
        initialize_database()  # noqa: E722

        # Show stats
        db = SessionLocal()
        stats = get_admin_stats(db)
        db.close()

        print()
        print("=" * 60)
        print("DATABASE STATISTICS")
        print("=" * 60)
        print(f"Admin Users: {stats.get('admin_users', 0)}")
        print(f"Total Coupons: {stats.get('total_coupons', 0)}")
        print(f"Status: {'✅ Healthy' if stats.get('database_healthy') else '❌ Unhealthy'}")
        print("=" * 60)
        print()
        print("✅ Database initialization completed successfully!")
        print()

    except Exception as e:
        print(f"\n❌ Initialization failed: {e}\n")
        exit(1)
330
ebook_backend_admin_panel/admin-backend/main.py
Normal file
@@ -0,0 +1,330 @@
from fastapi import FastAPI, Request, status
from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.trustedhost import TrustedHostMiddleware
from fastapi.exceptions import RequestValidationError
from starlette.exceptions import HTTPException as StarletteHTTPException
import time
import os
import logging
from contextlib import asynccontextmanager
from typing import Dict, Any
from routes import auth
from utils.logger import setup_logger
from utils.exceptions import APIException, handle_api_exception
from models.user import AdminUser
from models.coupon import Coupon
from utils.auth import engine
from init_db import initialize_database

# Setup logging
logger = setup_logger(__name__)

# Application configuration
class AppConfig:
    """Application configuration class"""
    APP_NAME = os.getenv("APP_NAME")
    VERSION = os.getenv("APP_VERSION")
    DEBUG = os.getenv("DEBUG", "false").lower() == "true"
    ENVIRONMENT = os.getenv("ENVIRONMENT", "development")

    # CORS settings - parse comma-separated string
    _cors_origins_str = os.getenv("CORS_ORIGINS", "")
    CORS_ORIGINS = [origin.strip() for origin in _cors_origins_str.split(",") if origin.strip()] if _cors_origins_str else []

    # Trusted hosts for production
    _trusted_hosts_str = os.getenv("TRUSTED_HOSTS", "*")
    TRUSTED_HOSTS = [host.strip() for host in _trusted_hosts_str.split(",") if host.strip()] if _trusted_hosts_str != "*" else ["*"]

# Application lifespan manager
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage application startup and shutdown events"""
    # Startup
    logger.info(
        "Application starting up",
        extra={
            "app_name": AppConfig.APP_NAME,
            "version": AppConfig.VERSION,
            "environment": AppConfig.ENVIRONMENT,
            "debug": AppConfig.DEBUG
        }
    )

    # Ensure required directories exist
    ensure_directories()

    # Initialize database: create tables and default admin user
    try:
        initialize_database()
    except Exception as e:
        logger.error(f"Error initializing database: {e}")
        raise

    yield
    # Shutdown
    logger.info("Application shutting down")

def ensure_directories():
    """Ensure required directories exist"""
    directories = [
        "translation_upload",
        "logs"
    ]

    for directory in directories:
        os.makedirs(directory, exist_ok=True)
        logger.debug(f"Ensured directory exists: {directory}")

# Create FastAPI application with enterprise features
app = FastAPI(
    title=AppConfig.APP_NAME,
    version=AppConfig.VERSION,
    description="Enterprise-grade Ebook Coupon Management System API",
    docs_url="/docs" if AppConfig.DEBUG else None,
    redoc_url="/redoc" if AppConfig.DEBUG else None,
    lifespan=lifespan
)

# Get paths relative to backend/main.py
BASE_DIR = os.path.dirname(__file__)
PARENT_DIR = os.path.abspath(os.path.join(BASE_DIR, ".."))
ADMIN_PANEL_DIR = os.path.join(PARENT_DIR, "admin-frontend")

# Mount static files
app.mount("/static", StaticFiles(directory=ADMIN_PANEL_DIR), name="static")

# Setup templates
templates = Jinja2Templates(directory=ADMIN_PANEL_DIR)

# Add middleware for production readiness
# NOTE: TrustedHostMiddleware disabled when behind reverse proxy (Traefik/Coolify)
# The reverse proxy handles host validation
# if AppConfig.ENVIRONMENT == "production":
#     # Trusted host middleware for production security
#     app.add_middleware(
#         TrustedHostMiddleware,
#         allowed_hosts=AppConfig.TRUSTED_HOSTS
#     )

# CORS middleware for cross-origin requests
app.add_middleware(
    CORSMiddleware,
    allow_origins=AppConfig.CORS_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Request timing and logging middleware
@app.middleware("http")
async def add_process_time_header(request: Request, call_next):
    """Add request processing time and logging"""
    start_time = time.time()

    # Generate request ID for tracking
    request_id = f"{int(start_time * 1000)}"
    request.state.request_id = request_id

    # Log incoming request
    logger.info(
        f"Incoming request: {request.method} {request.url.path}",
        extra={
            "request_id": request_id,
            "method": request.method,
            "path": request.url.path,
            "client_ip": request.client.host,
            "user_agent": request.headers.get("user-agent", "")
        }
    )

    try:
        response = await call_next(request)
        process_time = time.time() - start_time

        # Add headers for monitoring
        response.headers["X-Process-Time"] = f"{process_time:.4f}"
        response.headers["X-Request-ID"] = request_id

        # Log successful response
        logger.info(
            f"Request completed: {request.method} {request.url.path}",
            extra={
                "request_id": request_id,
                "status_code": response.status_code,
                "process_time": process_time
            }
        )

        return response

    except Exception as e:
        process_time = time.time() - start_time
        logger.error(
            f"Request failed: {request.method} {request.url.path}",
            extra={
                "request_id": request_id,
                "error": str(e),
                "process_time": process_time
            },
            exc_info=True
        )
        raise

# Exception handlers for proper error responses
@app.exception_handler(APIException)
async def api_exception_handler(request: Request, exc: APIException):
    """Handle custom API exceptions"""
    logger.warning(
        f"API Exception: {exc.detail}",
        extra={
            "request_id": getattr(request.state, "request_id", "unknown"),
            "status_code": exc.status_code,
            "path": request.url.path
        }
    )

    return JSONResponse(
        status_code=exc.status_code,
        content={
            "success": False,
            "error": exc.detail,
            "error_code": exc.error_code,
            "timestamp": time.time(),
            "path": str(request.url.path)
        }
    )

@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    """Handle validation errors"""
    # Safely extract error details
    try:
        error_details = []
        for error in exc.errors():
            safe_error = {
                "type": error.get("type", "unknown"),
                "loc": error.get("loc", []),
                "msg": str(error.get("msg", "Unknown error")),
                "input": str(error.get("input", "Unknown input"))
            }
            if "ctx" in error and error["ctx"]:
                safe_error["ctx"] = {k: str(v) for k, v in error["ctx"].items()}
            error_details.append(safe_error)
    except Exception:
        error_details = [{"type": "validation_error", "msg": "Request validation failed"}]

    logger.warning(
        "Validation error",
        extra={
            "request_id": getattr(request.state, "request_id", "unknown"),
            "errors": error_details,
            "path": request.url.path
        }
    )

    return JSONResponse(
        status_code=422,
        content={
            "success": False,
            "error": "Validation Error",
            "error_code": "VALIDATION_ERROR",
            "detail": "Request validation failed",
            "timestamp": time.time(),
            "path": str(request.url.path),
            "details": error_details
        }
    )

@app.exception_handler(StarletteHTTPException)
async def http_exception_handler(request: Request, exc: StarletteHTTPException):
    """Handle HTTP exceptions"""
    logger.warning(
        f"HTTP Exception: {exc.status_code}",
        extra={
            "request_id": getattr(request.state, "request_id", "unknown"),
            "status_code": exc.status_code,
            "detail": exc.detail,
            "path": request.url.path
        }
    )

    return JSONResponse(
        status_code=exc.status_code,
        content={
            "success": False,
            "error": "HTTP Error",
            "detail": exc.detail,
            "timestamp": time.time(),
            "path": str(request.url.path)
        }
    )

@app.exception_handler(Exception)
async def generic_exception_handler(request: Request, exc: Exception):
    """Handle generic exceptions"""
    logger.error(
        "Unhandled exception",
        extra={
            "request_id": getattr(request.state, "request_id", "unknown"),
            "exception_type": type(exc).__name__,
            "exception_message": str(exc),
            "path": request.url.path
        },
        exc_info=True
    )

    return JSONResponse(
        status_code=500,
        content={
            "success": False,
            "error": "Internal Server Error",
            "error_code": "INTERNAL_ERROR",
            "detail": "An unexpected error occurred",
            "timestamp": time.time(),
            "path": str(request.url.path)
        }
    )

# Health check endpoint
@app.get("/health", tags=["Health"])
async def health_check() -> Dict[str, Any]:
    """Health check endpoint for monitoring"""
    from utils.auth import get_db
    from sqlalchemy import text

    # Check database connection
    db_status = "connected"
    try:
        db = next(get_db())
        db.execute(text("SELECT 1"))
        db.close()
    except Exception as e:
        db_status = "disconnected"
        logger.error("Database health check failed", extra={"error": str(e)})

    return {
        "status": "healthy" if db_status == "connected" else "unhealthy",
        "timestamp": time.time(),
        "version": AppConfig.VERSION,
        "environment": AppConfig.ENVIRONMENT,
        "database_status": db_status
    }

# Include routers - auth.router handles / and /login HTML pages
app.include_router(auth.router, prefix="/auth", tags=["Auth"])
app.include_router(auth.router, prefix="", tags=["Auth"])

# API info endpoint (moved from / to /api to avoid conflict with auth.router)
@app.get("/api", tags=["API Info"])
async def api_info() -> Dict[str, Any]:
    """API information endpoint"""
    return {
        "message": AppConfig.APP_NAME,
        "version": AppConfig.VERSION,
        "environment": AppConfig.ENVIRONMENT,
        "docs_url": "/docs" if AppConfig.DEBUG else None,
        "health_check": "/health"
    }
134
ebook_backend_admin_panel/admin-backend/manage_test_db.py
Normal file
@@ -0,0 +1,134 @@
"""
Test Database Management Script
This script helps create and manage the test database for unit tests.
"""

import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
import sys

# Test database configuration
TEST_DB_NAME = "test_ebook_db"
import os
from dotenv import load_dotenv

load_dotenv()

TEST_DB_URL = os.getenv("TEST_DATABASE_URL", "postgresql://postgres:postgres@localhost:5432/test_ebook_db")

def create_test_database():
    """Create test database if it doesn't exist"""
    try:
        # Connect to default postgres database to create test database
        conn = psycopg2.connect(
            host="localhost",
            port="5432",
            user="postgres",
            password="postgres",
            database="postgres"
        )
        conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        cursor = conn.cursor()

        # Check if test database exists
        cursor.execute("SELECT 1 FROM pg_database WHERE datname = %s", (TEST_DB_NAME,))
        exists = cursor.fetchone()

        if not exists:
            cursor.execute(f"CREATE DATABASE {TEST_DB_NAME}")
            print(f"✅ Created test database: {TEST_DB_NAME}")
        else:
            print(f"ℹ️ Test database {TEST_DB_NAME} already exists")

        cursor.close()
        conn.close()
        return True

    except Exception as e:
        print(f"❌ Error creating test database: {e}")
        return False

def drop_test_database():
    """Drop test database"""
    try:
        # Connect to default postgres database to drop test database
        conn = psycopg2.connect(
            host="localhost",
            port="5432",
            user="postgres",
            password="postgres",
            database="postgres"
        )
        conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        cursor = conn.cursor()

        # Terminate all connections to test database
        cursor.execute(f"""
            SELECT pg_terminate_backend(pid)
            FROM pg_stat_activity
            WHERE datname = '{TEST_DB_NAME}' AND pid <> pg_backend_pid()
        """)

        cursor.execute(f"DROP DATABASE IF EXISTS {TEST_DB_NAME}")
        print(f"🗑️ Dropped test database: {TEST_DB_NAME}")

        cursor.close()
        conn.close()
        return True

    except Exception as e:
        print(f"❌ Error dropping test database: {e}")
        return False

def check_test_database():
    """Check if test database exists"""
    try:
        conn = psycopg2.connect(
            host="localhost",
            port="5432",
            user="postgres",
            password="postgres",
            database="postgres"
        )
        cursor = conn.cursor()

        cursor.execute("SELECT 1 FROM pg_database WHERE datname = %s", (TEST_DB_NAME,))
        exists = cursor.fetchone()

        cursor.close()
        conn.close()

        if exists:
            print(f"✅ Test database {TEST_DB_NAME} exists")
            return True
        else:
            print(f"❌ Test database {TEST_DB_NAME} does not exist")
            return False

    except Exception as e:
        print(f"❌ Error checking test database: {e}")
        return False

def main():
    """Main function to handle command line arguments"""
    if len(sys.argv) < 2:
        print("Usage: python manage_test_db.py [create|drop|check]")
        print("  create - Create test database")
        print("  drop - Drop test database")
        print("  check - Check if test database exists")
        return

    command = sys.argv[1].lower()

    if command == "create":
        create_test_database()
    elif command == "drop":
        drop_test_database()
    elif command == "check":
        check_test_database()
    else:
        print(f"Unknown command: {command}")
        print("Available commands: create, drop, check")

if __name__ == "__main__":
    main()
22
ebook_backend_admin_panel/admin-backend/models/coupon.py
Normal file
@@ -0,0 +1,22 @@
from sqlalchemy import Column, Integer, String, DateTime
from datetime import datetime
import pytz
from utils.auth import Base

class Coupon(Base):
    """
    SQLAlchemy model representing a coupon code entry in the database.

    Attributes:
        id (int): Primary key identifier.
        code (str): Unique coupon code string.
        usage_count (int): Number of times the coupon has been used.
        created_at (datetime): Timestamp of coupon creation (stored in Europe/Bratislava timezone).
        used_at (datetime | None): Timestamp of the last usage, nullable.
    """
    __tablename__ = "coupon_codes"
    id = Column(Integer, primary_key=True)
    code = Column(String, unique=True)
    usage_count = Column(Integer, default=0)
    created_at = Column(DateTime, default=lambda: datetime.now(pytz.timezone('Europe/Bratislava')))
    used_at = Column(DateTime, nullable=True)
20
ebook_backend_admin_panel/admin-backend/models/user.py
Normal file
@@ -0,0 +1,20 @@
from sqlalchemy import Column, Integer, String, DateTime
from datetime import datetime
import pytz
from utils.auth import Base

class AdminUser(Base):
    """
    SQLAlchemy model representing an admin user.

    Attributes:
        id (int): Primary key identifier.
        username (str): Unique admin username.
        password_hash (str): Hashed password for authentication.
        created_at (datetime): Timestamp of account creation (stored in Europe/Bratislava timezone).
    """
    __tablename__ = "admin_users"
    id = Column(Integer, primary_key=True)
    username = Column(String, unique=True, nullable=False)
    password_hash = Column(String, nullable=False)
    created_at = Column(DateTime, default=lambda: datetime.now(pytz.timezone('Europe/Bratislava')))
12
ebook_backend_admin_panel/admin-backend/pytest.ini
Normal file
@@ -0,0 +1,12 @@
[tool:pytest]
testpaths = tests
python_files = test_*.py
python_classes = Test*
python_functions = test_*
asyncio_mode = auto
addopts = -v --tb=short --maxfail=5 --durations=10 --disable-warnings --no-header
filterwarnings =
    ignore::DeprecationWarning
    ignore::PendingDeprecationWarning
    ignore::UserWarning
514
ebook_backend_admin_panel/admin-backend/routes/auth.py
Normal file
@@ -0,0 +1,514 @@
from fastapi import APIRouter, Depends, HTTPException, Form, status, Request, UploadFile, File
from fastapi.responses import JSONResponse, HTMLResponse, RedirectResponse
from sqlalchemy.orm import Session
from models.user import AdminUser
from utils.auth import get_db, hash_password, verify_password
from fastapi.templating import Jinja2Templates
from utils.template_loader import templates
from models.coupon import Coupon
from utils.coupon_utils import generate_coupon
from datetime import datetime
import pytz
from utils.timezone_utils import format_cest_datetime
from schemas import AdminLogin, CodeItem, CouponUpload
from fastapi.responses import StreamingResponse
import os

router = APIRouter()


@router.get("/login", response_class=HTMLResponse)
async def login_page(request: Request):
    """
    Render the admin login page.
    Args:
        request (Request): The incoming request object.
    Returns:
        HTMLResponse: Rendered login page.
    """
    # return templates.TemplateResponse("admin_login.html", {"request": request})
    return templates.TemplateResponse(request, "admin_login.html", {"data": "something"})


@router.get("/", response_class=HTMLResponse)
def admin_panel(request: Request):
    """
    Render the admin dashboard if logged in.
    Args:
        request (Request): The incoming request object.
    Returns:
        HTMLResponse or RedirectResponse: Admin dashboard or redirect to login.
    """
    if not request.cookies.get("admin_logged_in"):
        return RedirectResponse(url="/login", status_code=status.HTTP_302_FOUND)
    # return templates.TemplateResponse("admin_dashboard.html", {"request": request})
    return templates.TemplateResponse(request, "admin_dashboard.html", {"data": "something"})


@router.post("/admin/login")
def login(data: AdminLogin, db: Session = Depends(get_db)):
    """
    Handle admin login and set authentication cookie.
    Args:
        data (AdminLogin): Login data with username and password.
        db (Session): Database session.
    Returns:
        JSONResponse: Login status.
    """
    user = db.query(AdminUser).filter_by(username=data.username).first()
    if not user or not verify_password(data.password, user.password_hash):
        raise HTTPException(status_code=401, detail="Invalid credentials")
    response = JSONResponse(content={"status": "success"})
    response.set_cookie("admin_logged_in", "true", httponly=True, samesite="strict")
    return response


@router.post("/admin/logout")
def logout():
    """
    Handle admin logout and clear the authentication cookie.
    Returns:
        JSONResponse: Logout status.
    """
    response = JSONResponse(content={"status": "success"})
    response.delete_cookie("admin_logged_in")
    return response


@router.post("/generate")
async def generate_code(mode: str = Form(...), count: int = Form(1), db: Session = Depends(get_db),
                        request: Request = None):
    """
    Generate coupon codes (single or bulk).
    Args:
        mode (str): 'single' or 'bulk'.
        count (int): Number of codes to generate (used for bulk).
        db (Session): Database session.
        request (Request): Incoming request for auth check.
    Returns:
        dict: Generated codes.
    """
    if not request.cookies.get("admin_logged_in"):
        raise HTTPException(status_code=401, detail="Unauthorized")

    new_codes = []
    if mode == "single":
        new_code = generate_coupon().upper()  # Convert to uppercase
        db_code = Coupon(code=new_code, usage_count=0)
        db.add(db_code)
        db.commit()
        return {"code": new_code}
    elif mode == "bulk":
        for _ in range(count):
            code = generate_coupon().upper()  # Convert to uppercase
            db_code = Coupon(code=code, usage_count=0)
            db.add(db_code)
            new_codes.append(code)
        db.commit()
        return {"codes": new_codes}
    else:
        raise HTTPException(status_code=400, detail="Invalid mode")


@router.get("/list")
async def list_codes(page: int = 1, limit: int = 20, db: Session = Depends(get_db)):
    """
    List paginated coupon codes sorted by usage count.
    Args:
        page (int): Page number.
        limit (int): Items per page.
        db (Session): Database session.
    Returns:
        dict: Paginated coupon data.
    """
    offset = (page - 1) * limit
    total_coupons = db.query(Coupon).count()
    coupons = db.query(Coupon).order_by(Coupon.usage_count.desc()).offset(offset).limit(limit).all()

    return {
        "codes": [{"code": c.code, "used_at": format_cest_datetime(c.used_at) if c.used_at else None, "usage_count": c.usage_count} for c in coupons],
        "total": total_coupons,
        "page": page,
        "limit": limit,
        "total_pages": (total_coupons + limit - 1) // limit
    }


@router.get("/search-codes")
def search_codes(query: str, db: Session = Depends(get_db)):
    """
    Search coupon codes by partial match (case-insensitive).
    Args:
        query (str): Search query.
        db (Session): Database session.
    Returns:
        list: Matching coupon codes.
    """
    # Search with case-insensitive matching
    codes = (
        db.query(Coupon)
        .filter(Coupon.code.ilike(f"%{query.upper()}%"))
        .all()
    )
    return [{"code": c.code, "used": c.usage_count, "usage_count": c.usage_count, "used_at": format_cest_datetime(c.used_at) if c.used_at else None} for c in codes]


@router.post("/use-code")
async def use_code(item: dict, db: Session = Depends(get_db)):
    """
    Mark a coupon code as used (only if not already used).
    Args:
        item (dict): Dictionary containing the code to mark as used.
        db (Session): Database session.
    Returns:
        dict: Updated code and timestamp.
    """
    code = item["code"].strip()
    coupon = db.query(Coupon).filter(Coupon.code.ilike(code)).first()
    if not coupon:
        raise HTTPException(status_code=404, detail="Invalid code")
    if coupon.usage_count >= 1:
        raise HTTPException(status_code=400, detail="Coupon already used")
    coupon.usage_count += 1
    coupon.used_at = datetime.now(pytz.timezone('Asia/Kolkata'))
    db.commit()
    return {"code": coupon.code, "used_at": format_cest_datetime(coupon.used_at)}


@router.get("/check-code/{code}")
async def check_code(code: str, db: Session = Depends(get_db)):
    """
    Check if a specific coupon code exists and its usage count.
    Args:
        code (str): Coupon code to check.
        db (Session): Database session.
    Returns:
        dict: Code and usage count.
    """
    # Use case-insensitive search to handle both cases
    coupon = db.query(Coupon).filter(Coupon.code.ilike(code.strip())).first()
    if not coupon:
        raise HTTPException(status_code=404, detail="Code not found")
    return {"code": coupon.code, "used": coupon.usage_count}


@router.post("/verify")
async def verify_coupon(coupon_req: dict, db: Session = Depends(get_db)):
    """
    Verify and mark a coupon as used if it exists and is unused.
    Args:
        coupon_req (dict): Dictionary with 'code' key.
        db (Session): Database session.
    Returns:
        dict: Success message and timestamp.
    """
    raw_code = coupon_req["code"]
    code = raw_code.strip()
    coupon = db.query(Coupon).filter(Coupon.code.ilike(code)).first()
    if not coupon:
        raise HTTPException(status_code=404, detail="Invalid coupon code")
    if coupon.usage_count >= 1:
        raise HTTPException(status_code=400, detail="Coupon already used")
    coupon.usage_count += 1
    coupon.used_at = datetime.now(pytz.timezone('Asia/Kolkata'))
    db.commit()
    return {"message": "Coupon verified", "used_at": format_cest_datetime(coupon.used_at)}


@router.post("/upload-codes")
async def upload_codes(upload_data: CouponUpload, db: Session = Depends(get_db), request: Request = None):
    """
    Upload multiple coupon codes from Excel data.
    Args:
        upload_data (CouponUpload): Pydantic model containing code list.
        db (Session): Database session.
        request (Request): Request object for auth check.
    Returns:
        dict: Upload summary (uploaded, skipped, total).
    """
    if not request.cookies.get("admin_logged_in"):
        raise HTTPException(status_code=401, detail="Unauthorized")

    uploaded = 0
    skipped = 0

    for coupon_data in upload_data.codes:
        try:
            # Normalize code to uppercase
            normalized_code = coupon_data.code.strip().upper()

            # Check if code already exists
            existing_coupon = db.query(Coupon).filter(Coupon.code == normalized_code).first()
            if existing_coupon:
                skipped += 1
                continue

            # Create new coupon with usage count from Excel
            new_coupon = Coupon(
                code=normalized_code,  # Store as uppercase
                usage_count=coupon_data.usage
            )
            db.add(new_coupon)
            uploaded += 1

        except Exception as e:
            print(f"Error inserting code {coupon_data.code}: {e}")
            skipped += 1

    try:
        db.commit()
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")

    return {
        "uploaded": uploaded,
        "skipped": skipped,
        "total": len(upload_data.codes)
    }


@router.post("/add-code")
def add_code(item: CodeItem, db: Session = Depends(get_db), request: Request = None):
    """
    Add a single coupon code manually.
    Args:
        item (CodeItem): Coupon data from request body.
        db (Session): Database session.
        request (Request): Request object for auth check.
    Returns:
        dict: Success message.
    """
    if not request.cookies.get("admin_logged_in"):
        raise HTTPException(status_code=401, detail="Unauthorized")

    # Normalize code to uppercase for consistency
    normalized_code = item.code.strip().upper()

    existing = db.query(Coupon).filter(Coupon.code == normalized_code).first()
    if existing:
        raise HTTPException(status_code=400, detail="Code already exists")

    new_coupon = Coupon(
        code=normalized_code,  # Store as uppercase
        usage_count=max(0, item.usage)
    )
    db.add(new_coupon)
    db.commit()
    return {"message": "Code added successfully"}


@router.delete("/delete-code/{code}")
def delete_code(code: str, db: Session = Depends(get_db), request: Request = None):
    """
    Delete a specific coupon code.
    Args:
        code (str): Coupon code to delete.
        db (Session): Database session.
        request (Request): Request object for auth check.
    Returns:
        dict: Success message.
    """
    if not request.cookies.get("admin_logged_in"):
        raise HTTPException(status_code=401, detail="Unauthorized")

    # Use case-insensitive search to handle both uppercase and lowercase codes
    coupon = db.query(Coupon).filter(Coupon.code.ilike(code.strip())).first()
    if not coupon:
        raise HTTPException(status_code=404, detail="Code not found")

    db.delete(coupon)
    db.commit()
    return {"message": "Code deleted successfully"}


# Translation file management
TRANSLATION_DIR = os.path.join(os.path.dirname(__file__), '..', 'translationfile')
TRANSLATION_DIR = os.path.abspath(TRANSLATION_DIR)
TRANSLATION_FILENAME = 'translation.xlsx'
TRANSLATION_PATH = os.path.join(TRANSLATION_DIR, TRANSLATION_FILENAME)


@router.post("/upload-translations")
async def upload_translation(file: UploadFile = File(...), request: Request = None):
    """
    Upload a new translation Excel file. Stores the file on disk and saves the original filename in metadata.
    Args:
        file (UploadFile): The uploaded Excel file.
        request (Request): Request object to check admin authentication.
    Returns:
        dict: Success message with original filename.
    """
    if not request.cookies.get("admin_logged_in"):
        raise HTTPException(status_code=401, detail="Unauthorized")

    # Create directory if it doesn't exist
    if not os.path.exists(TRANSLATION_DIR):
        os.makedirs(TRANSLATION_DIR, exist_ok=True)

    # Check if a translation file already exists
    if os.path.exists(TRANSLATION_PATH):
        raise HTTPException(status_code=400, detail="A translation file already exists. Please delete it first.")

    # Store the original filename in a metadata file
    original_filename = file.filename or "translation.xlsx"
    metadata_path = os.path.join(TRANSLATION_DIR, 'metadata.txt')

    try:
        # Read and save the uploaded file
        content = await file.read()
        with open(TRANSLATION_PATH, 'wb') as f:
            f.write(content)

        # Save the original filename to metadata file
        with open(metadata_path, 'w') as f:
            f.write(original_filename)

        return {"message": "Translation file uploaded successfully", "filename": original_filename}

    except Exception as e:
        # Clean up if there was an error
        if os.path.exists(TRANSLATION_PATH):
            os.remove(TRANSLATION_PATH)
        if os.path.exists(metadata_path):
            os.remove(metadata_path)
        raise HTTPException(status_code=500, detail=f"Upload failed: {str(e)}")


@router.delete("/delete-translation")
def delete_translation(request: Request = None):
    """
    Delete the uploaded translation file and its metadata.
    Args:
        request (Request): Request object to check admin authentication.
    Returns:
        dict: Success message if deletion was successful.
    """
    if not request.cookies.get("admin_logged_in"):
        raise HTTPException(status_code=401, detail="Unauthorized")

    metadata_path = os.path.join(TRANSLATION_DIR, 'metadata.txt')
    files_deleted = []

    # Delete the translation file
    if os.path.exists(TRANSLATION_PATH):
        os.remove(TRANSLATION_PATH)
        files_deleted.append("translation file")

    # Delete the metadata file
    if os.path.exists(metadata_path):
        os.remove(metadata_path)
        files_deleted.append("metadata")

    # Delete the translation directory if it exists and is empty
    if os.path.exists(TRANSLATION_DIR) and not os.listdir(TRANSLATION_DIR):
        os.rmdir(TRANSLATION_DIR)
        files_deleted.append("directory")

    if files_deleted:
        return {"message": f"Translation file deleted successfully"}
    else:
        raise HTTPException(status_code=404, detail="No translation file found")


@router.get("/download-translation")
def download_translation(request: Request = None):
    """
    Download the uploaded translation file with original filename.
    Args:
        request (Request): Request object to check admin authentication.
    Returns:
        StreamingResponse: Downloadable Excel file.
    """
    if not request.cookies.get("admin_logged_in"):
        raise HTTPException(status_code=401, detail="Unauthorized")

    if not os.path.exists(TRANSLATION_PATH):
        raise HTTPException(status_code=404, detail="No translation file found")

    # Get the original filename from metadata
    metadata_path = os.path.join(TRANSLATION_DIR, 'metadata.txt')
    original_filename = TRANSLATION_FILENAME  # Default filename

    if os.path.exists(metadata_path):
        try:
            with open(metadata_path, 'r') as f:
                stored_filename = f.read().strip()
                if stored_filename:
                    original_filename = stored_filename
        except Exception:
            # If we can't read metadata, use default filename
            pass

    # Return the file with proper headers
    def file_generator():
        with open(TRANSLATION_PATH, 'rb') as f:
            while True:
                chunk = f.read(8192)  # 8KB chunks
                if not chunk:
                    break
                yield chunk

    return StreamingResponse(
        file_generator(),
        media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={"Content-Disposition": f"attachment; filename=\"{original_filename}\""}
    )


@router.get("/translations/status")
def check_translation_file():
    """Check if translation file exists and return filename"""
    file_exists = os.path.exists(TRANSLATION_PATH)

    if not file_exists:
        return {"file_exists": False, "file_name": None}

    # Get the original filename from metadata
    metadata_path = os.path.join(TRANSLATION_DIR, 'metadata.txt')
    original_filename = TRANSLATION_FILENAME  # Default filename

    if os.path.exists(metadata_path):
        try:
            with open(metadata_path, 'r') as f:
                stored_filename = f.read().strip()
                if stored_filename:
                    original_filename = stored_filename
        except Exception:
            # If we can't read metadata, use default filename
            pass

    return {
        "file_exists": True,
        "file_name": original_filename
    }


@router.get("/translations/latest")
def get_latest_translation():
    """
    Legacy endpoint that returns the latest uploaded translation file.
    Returns:
        StreamingResponse: Downloadable Excel file.
    """
    if not os.path.exists(TRANSLATION_PATH):
        raise HTTPException(status_code=404, detail="No translation file found")

    # Get the original filename from metadata for consistency
    metadata_path = os.path.join(TRANSLATION_DIR, 'metadata.txt')
    original_filename = TRANSLATION_FILENAME  # Default filename

    if os.path.exists(metadata_path):
        try:
            with open(metadata_path, 'r') as f:
                stored_filename = f.read().strip()
                if stored_filename:
                    original_filename = stored_filename
        except Exception:
            pass

    return StreamingResponse(
        open(TRANSLATION_PATH, 'rb'),
        media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={"Content-Disposition": f"attachment; filename=\"{original_filename}\""}
    )
47
ebook_backend_admin_panel/admin-backend/schemas.py
Normal file
@@ -0,0 +1,47 @@
from pydantic import BaseModel
from typing import List

class AdminLogin(BaseModel):
    """
    Schema for admin login credentials.

    Attributes:
        username (str): Admin username.
        password (str): Admin password.
    """
    username: str
    password: str


class CodeItem(BaseModel):
    """
    Schema representing a coupon code and its usage count.

    Attributes:
        code (str): The coupon code.
        usage (int): Number of times the code has been used.
    """
    code: str
    usage: int


class CouponUploadItem(BaseModel):
    """
    Schema for an individual coupon code to be uploaded.

    Attributes:
        code (str): The coupon code.
        usage (int): Optional initial usage count (default is 0).
    """
    code: str
    usage: int = 0


class CouponUpload(BaseModel):
    """
    Schema for bulk coupon upload containing a list of coupon items.

    Attributes:
        codes (List[CouponUploadItem]): List of coupon entries.
    """
    codes: List[CouponUploadItem]
168
ebook_backend_admin_panel/admin-backend/tests/conftest.py
Normal file
@@ -0,0 +1,168 @@
import pytest
import os
import tempfile
import shutil
from fastapi.testclient import TestClient
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import StaticPool
from unittest.mock import patch, MagicMock

# Import the app and models
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from main import app
from models.user import AdminUser
from models.coupon import Coupon
from utils.auth import Base, get_db, hash_password
from utils.template_loader import templates

# Test database configuration
TEST_DATABASE_URL = "sqlite:///:memory:"

@pytest.fixture(scope="session")
def test_engine():
    """Create test database engine"""
    engine = create_engine(
        TEST_DATABASE_URL,
        connect_args={"check_same_thread": False},
        poolclass=StaticPool,
    )
    return engine

@pytest.fixture(scope="session")
def test_session_factory(test_engine):
    """Create test session factory"""
    TestingSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=test_engine)
    return TestingSessionLocal

@pytest.fixture(scope="session")
def test_db_setup(test_engine):
    """Create test database tables once for the session"""
    Base.metadata.create_all(bind=test_engine)
    yield
    Base.metadata.drop_all(bind=test_engine)

@pytest.fixture(scope="function")
def test_db(test_engine, test_session_factory, test_db_setup):
    """Create test database session"""
    # Create session
    session = test_session_factory()

    # Clear any existing data
    for table in reversed(Base.metadata.sorted_tables):
        session.execute(table.delete())
    session.commit()

    yield session

    # Cleanup - rollback and close
    session.rollback()
    session.close()

@pytest.fixture(scope="function")
def client(test_db):
    """Create test client with database dependency override"""
    def override_get_db():
        try:
            yield test_db
        finally:
            pass

    app.dependency_overrides[get_db] = override_get_db
    with TestClient(app) as test_client:
        yield test_client
    app.dependency_overrides.clear()

@pytest.fixture
def admin_user(test_db):
    """Create a test admin user"""
    # Clear existing users first
    test_db.query(AdminUser).delete()
    test_db.commit()

    user = AdminUser(
        username="testadmin",
        password_hash=hash_password("testpassword123")
    )
    test_db.add(user)
    test_db.commit()
    test_db.refresh(user)
    return user

@pytest.fixture
def sample_coupons(test_db):
    """Create sample coupon codes for testing"""
    # Clear existing coupons first
    test_db.query(Coupon).delete()
    test_db.commit()

    coupons = []
    codes = ["TEST123", "SAMPLE456", "DEMO789"]

    for code in codes:
        coupon = Coupon(code=code, usage_count=0)
        test_db.add(coupon)
        coupons.append(coupon)

    test_db.commit()
    for coupon in coupons:
        test_db.refresh(coupon)

    return coupons

@pytest.fixture
def used_coupon(test_db):
    """Create a used coupon for testing"""
    from datetime import datetime
    import pytz

    # Clear existing coupons first
    test_db.query(Coupon).delete()
    test_db.commit()

    coupon = Coupon(
        code="USED123",
        usage_count=1,
        used_at=datetime.now(pytz.timezone('Asia/Kolkata'))
    )
    test_db.add(coupon)
    test_db.commit()
    test_db.refresh(coupon)
    return coupon

@pytest.fixture
def temp_translation_dir():
    """Create temporary directory for translation files"""
    temp_dir = tempfile.mkdtemp()
    original_dir = os.path.join(os.path.dirname(__file__), '..', 'translationfile')

    # Mock the translation directory path
    with patch('routes.auth.TRANSLATION_DIR', temp_dir):
        with patch('routes.auth.TRANSLATION_PATH', os.path.join(temp_dir, 'translation.xlsx')):
            yield temp_dir

    # Cleanup
    shutil.rmtree(temp_dir, ignore_errors=True)

@pytest.fixture
def mock_templates():
    """Mock Jinja2 templates"""
    mock_template = MagicMock()
    mock_template.TemplateResponse.return_value = MagicMock()

    with patch('routes.auth.templates', mock_template):
        yield mock_template

@pytest.fixture
def auth_headers():
    """Return headers for authenticated requests"""
    return {"Cookie": "admin_logged_in=true"}

@pytest.fixture
def mock_logger():
    """Mock logger to avoid file operations during tests"""
    with patch('utils.logger.setup_logger') as mock:
        mock.return_value = MagicMock()
        yield mock
@@ -0,0 +1,146 @@
import pytest
from unittest.mock import patch, MagicMock
from fastapi import HTTPException

class TestAuthRoutes:
    """Test cases for authentication routes"""

    def test_admin_login_success(self, client, admin_user):
        """Test successful admin login"""
        login_data = {
            "username": "testadmin",
            "password": "testpassword123"
        }

        response = client.post("/admin/login", json=login_data)
        assert response.status_code == 200
        data = response.json()
        assert data["status"] == "success"

        # Check if cookie is set
        assert "admin_logged_in=true" in response.headers.get("set-cookie", "")

    def test_admin_login_invalid_username(self, client, test_db):
        """Test admin login with invalid username"""
        login_data = {
            "username": "nonexistent",
            "password": "testpassword123"
        }

        response = client.post("/admin/login", json=login_data)
        assert response.status_code == 401
        data = response.json()
        assert data["detail"] == "Invalid credentials"

    def test_admin_login_invalid_password(self, client, admin_user):
        """Test admin login with invalid password"""
        login_data = {
            "username": "testadmin",
            "password": "wrongpassword"
        }

        response = client.post("/admin/login", json=login_data)
        assert response.status_code == 401
        data = response.json()
        assert data["detail"] == "Invalid credentials"

    def test_admin_login_missing_username(self, client):
        """Test admin login with missing username"""
        login_data = {
            "password": "testpassword123"
        }

        response = client.post("/admin/login", json=login_data)
        assert response.status_code == 422  # Validation error

    def test_admin_login_missing_password(self, client):
        """Test admin login with missing password"""
        login_data = {
            "username": "testadmin"
        }

        response = client.post("/admin/login", json=login_data)
        assert response.status_code == 422  # Validation error

    def test_admin_logout_with_cookie(self, client):
        """Test admin logout when user is logged in"""
        response = client.post("/admin/logout", headers={"Cookie": "admin_logged_in=true"})
        assert response.status_code == 200
        data = response.json()
        assert data["status"] == "success"

    @patch('routes.auth.verify_password')
    def test_admin_login_password_verification(self, mock_verify, client, admin_user):
        """Test password verification during login"""
        mock_verify.return_value = True

        login_data = {
            "username": "testadmin",
            "password": "testpassword123"
        }

        response = client.post("/admin/login", json=login_data)
        assert response.status_code == 200
        mock_verify.assert_called_once_with("testpassword123", admin_user.password_hash)

    @patch('routes.auth.verify_password')
    def test_admin_login_password_verification_failure(self, mock_verify, client, admin_user):
        """Test password verification failure during login"""
        mock_verify.return_value = False

        login_data = {
            "username": "testadmin",
            "password": "testpassword123"
        }

        response = client.post("/admin/login", json=login_data)
        assert response.status_code == 401
        mock_verify.assert_called_once_with("testpassword123", admin_user.password_hash)

    def test_admin_login_case_sensitive_username(self, client, admin_user):
        """Test admin login with case-sensitive username"""
        login_data = {
            "username": "TESTADMIN",  # Different case
            "password": "testpassword123"
        }

        response = client.post("/admin/login", json=login_data)
        assert response.status_code == 401
        data = response.json()
        assert data["detail"] == "Invalid credentials"

    def test_admin_login_empty_credentials(self, client):
        """Test admin login with empty credentials"""
        login_data = {
            "username": "",
            "password": ""
        }

        response = client.post("/admin/login", json=login_data)
        assert response.status_code == 401
        data = response.json()
        assert data["detail"] == "Invalid credentials"

    def test_admin_login_whitespace_credentials(self, client):
        """Test admin login with whitespace-only credentials"""
        login_data = {
            "username": " ",
            "password": " "
        }

        response = client.post("/admin/login", json=login_data)
        assert response.status_code == 401
        data = response.json()
        assert data["detail"] == "Invalid credentials"

    def test_admin_logout_response_headers(self, client):
        """Test admin logout response headers"""
        response = client.post("/admin/logout")
        assert response.status_code == 200

        # Check content type
        assert response.headers["content-type"] == "application/json"

        # Check cookie deletion
        set_cookie = response.headers.get("set-cookie", "")
        assert "admin_logged_in=" in set_cookie
@@ -0,0 +1,406 @@
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
from fastapi import HTTPException
|
||||
|
||||
class TestCouponRoutes:
|
||||
"""Test cases for coupon management routes"""
|
||||
|
||||
def test_generate_single_code_unauthorized(self, client):
|
||||
"""Test generate single code without authentication"""
|
||||
response = client.post("/generate", data={"mode": "single", "count": 1})
|
||||
assert response.status_code == 401
|
||||
data = response.json()
|
||||
assert data["detail"] == "Unauthorized"
|
||||
|
||||
def test_generate_single_code_success(self, client, auth_headers):
|
||||
"""Test successful single code generation"""
|
||||
with patch('routes.auth.generate_coupon') as mock_generate:
|
||||
mock_generate.return_value = "ABC123DEF4"
|
||||
|
||||
response = client.post("/generate", data={"mode": "single", "count": 1}, headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["code"] == "ABC123DEF4"
|
||||
mock_generate.assert_called_once()
|
||||
|
||||
def test_generate_bulk_codes_success(self, client, auth_headers):
|
||||
"""Test successful bulk code generation"""
|
||||
with patch('routes.auth.generate_coupon') as mock_generate:
|
||||
mock_generate.side_effect = ["CODE1", "CODE2", "CODE3"]
|
||||
|
||||
response = client.post("/generate", data={"mode": "bulk", "count": 3}, headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["codes"] == ["CODE1", "CODE2", "CODE3"]
|
||||
assert mock_generate.call_count == 3
|
||||
|
||||
def test_generate_invalid_mode(self, client, auth_headers):
|
||||
"""Test code generation with invalid mode"""
|
||||
response = client.post("/generate", data={"mode": "invalid", "count": 1}, headers=auth_headers)
|
||||
assert response.status_code == 400
|
||||
data = response.json()
|
||||
assert data["detail"] == "Invalid mode"
|
||||
|
||||
def test_generate_bulk_zero_count(self, client, auth_headers):
|
||||
"""Test bulk generation with zero count"""
|
||||
response = client.post("/generate", data={"mode": "bulk", "count": 0}, headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["codes"] == []
|
||||
|
||||
def test_list_codes_pagination(self, client, sample_coupons):
|
||||
"""Test coupon listing with pagination"""
|
||||
response = client.get("/list?page=1&limit=2")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert "codes" in data
|
||||
assert "total" in data
|
||||
assert "page" in data
|
||||
assert "limit" in data
|
||||
assert "total_pages" in data
|
||||
|
||||
assert data["page"] == 1
|
||||
assert data["limit"] == 2
|
||||
assert data["total"] == 3
|
||||
assert len(data["codes"]) == 2
|
||||
|
||||
def test_list_codes_default_pagination(self, client, sample_coupons):
|
||||
"""Test coupon listing with default pagination"""
|
||||
response = client.get("/list")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["page"] == 1
|
||||
assert data["limit"] == 20
|
||||
assert len(data["codes"]) == 3
|
||||
|
||||
def test_list_codes_empty_database(self, client):
|
||||
"""Test coupon listing with empty database"""
|
||||
response = client.get("/list")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["codes"] == []
|
||||
assert data["total"] == 0
|
||||
assert data["page"] == 1
|
||||
assert data["limit"] == 20
|
||||
assert data["total_pages"] == 0
|
||||
|
||||
def test_list_codes_second_page(self, client, sample_coupons):
|
||||
"""Test coupon listing second page"""
|
||||
response = client.get("/list?page=2&limit=2")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["page"] == 2
|
||||
assert data["limit"] == 2
|
||||
assert len(data["codes"]) == 1 # Only 1 code left on page 2
|
||||
|
||||
def test_search_codes_success(self, client, sample_coupons):
|
||||
"""Test successful code search"""
|
||||
response = client.get("/search-codes?query=TEST")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert len(data) == 1
|
||||
assert data[0]["code"] == "TEST123"
|
||||
assert "used" in data[0]
|
||||
assert "usage_count" in data[0]
|
||||
assert "used_at" in data[0]
|
||||
|
||||
def test_search_codes_case_insensitive(self, client, sample_coupons):
|
||||
"""Test case-insensitive code search"""
|
||||
response = client.get("/search-codes?query=test")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert len(data) == 1
|
||||
assert data[0]["code"] == "TEST123"
|
||||
|
||||
def test_search_codes_partial_match(self, client, sample_coupons):
|
||||
"""Test partial code search"""
|
||||
response = client.get("/search-codes?query=123")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert len(data) == 1
|
||||
assert data[0]["code"] == "TEST123"
|
||||
|
||||
def test_search_codes_no_results(self, client, sample_coupons):
|
||||
"""Test code search with no results"""
|
||||
response = client.get("/search-codes?query=NONEXISTENT")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data == []
|
||||
|
||||
def test_search_codes_empty_query(self, client, sample_coupons):
|
||||
"""Test code search with empty query"""
|
||||
response = client.get("/search-codes?query=")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# Should return all codes when query is empty
|
||||
assert len(data) == 3
|
||||
|
||||
def test_use_code_success(self, client, sample_coupons):
|
||||
"""Test successful code usage"""
|
||||
response = client.post("/use-code", json={"code": "TEST123"})
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["code"] == "TEST123"
|
||||
assert "used_at" in data
|
||||
|
||||
def test_use_code_case_insensitive(self, client, sample_coupons):
|
||||
"""Test case-insensitive code usage"""
|
||||
response = client.post("/use-code", json={"code": "test123"})
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["code"] == "TEST123"
|
||||
|
||||
def test_use_code_not_found(self, client):
|
||||
"""Test using non-existent code"""
|
||||
response = client.post("/use-code", json={"code": "NONEXISTENT"})
|
||||
assert response.status_code == 404
|
||||
data = response.json()
|
||||
assert data["detail"] == "Invalid code"
|
||||
|
||||
def test_use_code_already_used(self, client, used_coupon):
|
||||
"""Test using already used code"""
|
||||
response = client.post("/use-code", json={"code": "USED123"})
|
||||
assert response.status_code == 400
|
||||
data = response.json()
|
||||
assert data["detail"] == "Coupon already used"
|
||||
|
||||
def test_use_code_whitespace_handling(self, client, sample_coupons):
|
||||
"""Test code usage with whitespace"""
|
||||
response = client.post("/use-code", json={"code": " TEST123 "})
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["code"] == "TEST123"
|
||||
|
||||
def test_check_code_success(self, client, sample_coupons):
|
||||
"""Test successful code check"""
|
||||
response = client.get("/check-code/TEST123")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["code"] == "TEST123"
|
||||
assert data["used"] == 0
|
||||
|
||||
def test_check_code_case_insensitive(self, client, sample_coupons):
|
||||
"""Test case-insensitive code check"""
|
||||
response = client.get("/check-code/test123")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["code"] == "TEST123"
|
||||
|
||||
def test_check_code_not_found(self, client):
|
||||
"""Test checking non-existent code"""
|
||||
response = client.get("/check-code/NONEXISTENT")
|
||||
assert response.status_code == 404
|
||||
data = response.json()
|
||||
assert data["detail"] == "Code not found"
|
||||
|
||||
def test_check_code_whitespace_handling(self, client, sample_coupons):
|
||||
"""Test code check with whitespace"""
|
||||
response = client.get("/check-code/ TEST123 ")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["code"] == "TEST123"
|
||||
|
||||
def test_verify_coupon_success(self, client, sample_coupons):
|
||||
"""Test successful coupon verification"""
|
||||
response = client.post("/verify", json={"code": "TEST123"})
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["message"] == "Coupon verified"
|
||||
assert "used_at" in data
|
||||
|
||||
def test_verify_coupon_case_insensitive(self, client, sample_coupons):
|
||||
"""Test case-insensitive coupon verification"""
|
||||
response = client.post("/verify", json={"code": "test123"})
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["message"] == "Coupon verified"
|
||||
|
||||
def test_verify_coupon_not_found(self, client):
|
||||
"""Test verifying non-existent coupon"""
|
||||
response = client.post("/verify", json={"code": "NONEXISTENT"})
|
||||
assert response.status_code == 404
|
||||
data = response.json()
|
||||
assert data["detail"] == "Invalid coupon code"
|
||||
|
||||
def test_verify_coupon_already_used(self, client, used_coupon):
|
||||
"""Test verifying already used coupon"""
|
||||
response = client.post("/verify", json={"code": "USED123"})
|
||||
assert response.status_code == 400
|
||||
data = response.json()
|
||||
assert data["detail"] == "Coupon already used"
|
||||
|
||||
def test_verify_coupon_whitespace_handling(self, client, sample_coupons):
|
||||
"""Test coupon verification with whitespace"""
|
||||
response = client.post("/verify", json={"code": " TEST123 "})
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["message"] == "Coupon verified"
|
||||
|
||||
def test_add_code_unauthorized(self, client):
|
||||
"""Test adding code without authentication"""
|
||||
code_data = {"code": "NEW123", "usage": 0}
|
||||
response = client.post("/add-code", json=code_data)
|
||||
assert response.status_code == 401
|
||||
data = response.json()
|
||||
assert data["detail"] == "Unauthorized"
|
||||
|
||||
def test_add_code_success(self, client, auth_headers):
|
||||
"""Test successful code addition"""
|
||||
code_data = {"code": "NEW123", "usage": 0}
|
||||
response = client.post("/add-code", json=code_data, headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["message"] == "Code added successfully"
|
||||
|
||||
def test_add_code_already_exists(self, client, sample_coupons, auth_headers):
|
||||
"""Test adding code that already exists"""
|
||||
code_data = {"code": "TEST123", "usage": 0}
|
||||
response = client.post("/add-code", json=code_data, headers=auth_headers)
|
||||
assert response.status_code == 400
|
||||
data = response.json()
|
||||
assert data["detail"] == "Code already exists"
|
||||
|
||||
def test_add_code_case_normalization(self, client, auth_headers):
|
||||
"""Test code case normalization during addition"""
|
||||
code_data = {"code": "new123", "usage": 0}
|
||||
response = client.post("/add-code", json=code_data, headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Verify the code was stored in uppercase
|
||||
response = client.get("/check-code/NEW123")
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_add_code_negative_usage(self, client, auth_headers):
|
||||
"""Test adding code with negative usage count"""
|
||||
code_data = {"code": "NEW123", "usage": -5}
|
||||
response = client.post("/add-code", json=code_data, headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Verify usage count was normalized to 0
|
||||
response = client.get("/check-code/NEW123")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["used"] == 0
|
||||
|
||||
def test_delete_code_unauthorized(self, client):
|
||||
"""Test deleting code without authentication"""
|
||||
response = client.delete("/delete-code/TEST123")
|
||||
assert response.status_code == 401
|
||||
data = response.json()
|
||||
assert data["detail"] == "Unauthorized"
|
||||
|
||||
def test_delete_code_success(self, client, sample_coupons, auth_headers):
|
||||
"""Test successful code deletion"""
|
||||
response = client.delete("/delete-code/TEST123", headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["message"] == "Code deleted successfully"
|
||||
|
||||
# Verify code is deleted
|
||||
response = client.get("/check-code/TEST123")
|
||||
assert response.status_code == 404
|
||||
|
||||
def test_delete_code_case_insensitive(self, client, sample_coupons, auth_headers):
|
||||
"""Test case-insensitive code deletion"""
|
||||
response = client.delete("/delete-code/test123", headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["message"] == "Code deleted successfully"
|
||||
|
||||
def test_delete_code_not_found(self, client, auth_headers):
|
||||
"""Test deleting non-existent code"""
|
||||
response = client.delete("/delete-code/NONEXISTENT", headers=auth_headers)
|
||||
assert response.status_code == 404
|
||||
data = response.json()
|
||||
assert data["detail"] == "Code not found"
|
||||
|
||||
def test_delete_code_whitespace_handling(self, client, sample_coupons, auth_headers):
|
||||
"""Test code deletion with whitespace"""
|
||||
response = client.delete("/delete-code/ TEST123 ", headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["message"] == "Code deleted successfully"
|
||||
|
||||
def test_upload_codes_unauthorized(self, client):
|
||||
"""Test uploading codes without authentication"""
|
||||
upload_data = {
|
||||
"codes": [
|
||||
{"code": "UPLOAD1", "usage": 0},
|
||||
{"code": "UPLOAD2", "usage": 0}
|
||||
]
|
||||
}
|
||||
response = client.post("/upload-codes", json=upload_data)
|
||||
assert response.status_code == 401
|
||||
data = response.json()
|
||||
assert data["detail"] == "Unauthorized"
|
||||
|
||||
def test_upload_codes_success(self, client, auth_headers):
|
||||
"""Test successful code upload"""
|
||||
upload_data = {
|
||||
"codes": [
|
||||
{"code": "UPLOAD1", "usage": 0},
|
||||
{"code": "UPLOAD2", "usage": 1}
|
||||
]
|
||||
}
|
||||
response = client.post("/upload-codes", json=upload_data, headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["uploaded"] == 2
|
||||
assert data["skipped"] == 0
|
||||
assert data["total"] == 2
|
||||
|
||||
def test_upload_codes_with_duplicates(self, client, sample_coupons, auth_headers):
|
||||
"""Test code upload with duplicate codes"""
|
||||
upload_data = {
|
||||
"codes": [
|
||||
{"code": "TEST123", "usage": 0}, # Already exists
|
||||
{"code": "NEW123", "usage": 0} # New code
|
||||
]
|
||||
}
|
||||
response = client.post("/upload-codes", json=upload_data, headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["uploaded"] == 1
|
||||
assert data["skipped"] == 1
|
||||
assert data["total"] == 2
|
||||
|
||||
def test_upload_codes_case_normalization(self, client, auth_headers):
|
||||
"""Test code case normalization during upload"""
|
||||
upload_data = {
|
||||
"codes": [
|
||||
{"code": "lowercase", "usage": 0},
|
||||
{"code": "MIXEDCase", "usage": 0}
|
||||
]
|
||||
}
|
||||
response = client.post("/upload-codes", json=upload_data, headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["uploaded"] == 2
|
||||
|
||||
# Verify codes were stored in uppercase
|
||||
response = client.get("/check-code/LOWERCASE")
|
||||
assert response.status_code == 200
|
||||
|
||||
response = client.get("/check-code/MIXEDCASE")
|
||||
assert response.status_code == 200
|
||||
259
ebook_backend_admin_panel/admin-backend/tests/test_main.py
Normal file
@@ -0,0 +1,259 @@
|
||||
import pytest
|
||||
import time
|
||||
import os
|
||||
from unittest.mock import patch, MagicMock
|
||||
from fastapi.testclient import TestClient
|
||||
from fastapi import HTTPException
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
import main
|
||||
|
||||
class TestMainApp:
|
||||
"""Test cases for main application functionality"""
|
||||
|
||||
def test_root_endpoint(self, client):
|
||||
"""Test root endpoint returns correct information"""
|
||||
response = client.get("/")
|
||||
assert response.status_code == 200
|
||||
# The auth router overrides the main app's root endpoint, so we get HTML
|
||||
assert "text/html" in response.headers["content-type"]
|
||||
# Check that it's the admin dashboard or login page
|
||||
content = response.text
|
||||
assert "admin" in content.lower() or "login" in content.lower()
|
||||
|
||||
def test_health_check_success(self, client, test_db):
|
||||
"""Test health check endpoint when database is connected"""
|
||||
response = client.get("/health")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["status"] == "healthy"
|
||||
assert "timestamp" in data
|
||||
assert "version" in data
|
||||
assert "environment" in data
|
||||
assert data["database_status"] == "connected"
|
||||
|
||||
@patch('utils.auth.get_db')
|
||||
def test_health_check_database_failure(self, mock_get_db, client):
|
||||
"""Test health check endpoint when database is disconnected"""
|
||||
# Mock database failure
|
||||
mock_db = MagicMock()
|
||||
mock_db.execute.side_effect = SQLAlchemyError("Database connection failed")
|
||||
mock_get_db.return_value = iter([mock_db])
|
||||
|
||||
response = client.get("/health")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["status"] == "unhealthy"
|
||||
assert data["database_status"] == "disconnected"
|
||||
|
||||
def test_middleware_process_time_header(self, client):
|
||||
"""Test that middleware adds process time header"""
|
||||
response = client.get("/health")
|
||||
assert "X-Process-Time" in response.headers
|
||||
assert "X-Request-ID" in response.headers
|
||||
process_time = float(response.headers["X-Process-Time"])
|
||||
assert process_time >= 0
|
||||
|
||||
def test_middleware_request_id(self, client):
|
||||
"""Test that middleware generates unique request IDs"""
|
||||
response1 = client.get("/health")
|
||||
response2 = client.get("/health")
|
||||
|
||||
request_id1 = response1.headers["X-Request-ID"]
|
||||
request_id2 = response2.headers["X-Request-ID"]
|
||||
|
||||
assert request_id1 != request_id2
|
||||
assert request_id1.isdigit()
|
||||
assert request_id2.isdigit()
|
||||
|
||||
def test_api_exception_handler(self, client):
|
||||
"""Test custom API exception handler"""
|
||||
from utils.exceptions import APIException
|
||||
|
||||
# Create a test endpoint that raises APIException
|
||||
@client.app.get("/test-api-exception")
|
||||
def test_api_exception():
|
||||
raise APIException(
|
||||
status_code=400,
|
||||
detail="Test API exception",
|
||||
error_code="TEST_ERROR"
|
||||
)
|
||||
|
||||
response = client.get("/test-api-exception")
|
||||
assert response.status_code == 400
|
||||
data = response.json()
|
||||
assert data["success"] is False
|
||||
assert data["error"] == "Test API exception"
|
||||
assert data["error_code"] == "TEST_ERROR"
|
||||
assert "timestamp" in data
|
||||
assert "path" in data
|
||||
|
||||
def test_validation_exception_handler(self, client):
|
||||
"""Test validation exception handler"""
|
||||
# Create a test endpoint with validation
|
||||
from pydantic import BaseModel
|
||||
|
||||
class TestModel(BaseModel):
|
||||
required_field: str
|
||||
|
||||
@client.app.post("/test-validation")
|
||||
def test_validation(model: TestModel):
|
||||
return {"message": "success"}
|
||||
|
||||
response = client.post("/test-validation", json={})
|
||||
assert response.status_code == 422
|
||||
data = response.json()
|
||||
assert data["success"] is False
|
||||
assert data["error"] == "Validation Error"
|
||||
assert data["error_code"] == "VALIDATION_ERROR"
|
||||
assert "details" in data
|
||||
|
||||
def test_http_exception_handler(self, client):
|
||||
"""Test HTTP exception handler"""
|
||||
@client.app.get("/test-http-exception")
|
||||
def test_http_exception():
|
||||
raise HTTPException(status_code=404, detail="Not found")
|
||||
|
||||
response = client.get("/test-http-exception")
|
||||
assert response.status_code == 404
|
||||
data = response.json()
|
||||
assert data["success"] is False
|
||||
assert data["error"] == "HTTP Error"
|
||||
assert data["detail"] == "Not found"
|
||||
|
||||
def test_generic_exception_handler(self, client):
|
||||
"""Test generic exception handler"""
|
||||
# Test that the exception handler is properly registered
|
||||
# by checking if it exists in the app's exception handlers
|
||||
assert Exception in client.app.exception_handlers
|
||||
assert client.app.exception_handlers[Exception] is not None
|
||||
|
||||
# Test that the handler function exists and is callable
|
||||
handler = client.app.exception_handlers[Exception]
|
||||
assert callable(handler)
|
||||
|
||||
# Test that the handler has the expected signature
|
||||
import inspect
|
||||
sig = inspect.signature(handler)
|
||||
assert len(sig.parameters) == 2 # request and exc parameters
|
||||
|
||||
@patch.dict(os.environ, {
|
||||
'APP_NAME': 'Test App',
|
||||
'APP_VERSION': '1.0.0',
|
||||
'DEBUG': 'true',
|
||||
'ENVIRONMENT': 'test',
|
||||
'CORS_ORIGINS': 'http://localhost:3000,http://localhost:8080',
|
||||
'TRUSTED_HOSTS': 'localhost,test.com'
|
||||
})
|
||||
def test_app_config_environment_variables(self):
|
||||
"""Test application configuration with environment variables"""
|
||||
# Clear any existing imports and reload
|
||||
import importlib
|
||||
import main
|
||||
importlib.reload(main)
|
||||
|
||||
assert main.AppConfig.APP_NAME == "Test App"
|
||||
assert main.AppConfig.VERSION == "1.0.0"
|
||||
assert main.AppConfig.DEBUG is True
|
||||
assert main.AppConfig.ENVIRONMENT == "test"
|
||||
assert "http://localhost:3000" in main.AppConfig.CORS_ORIGINS
|
||||
assert "http://localhost:8080" in main.AppConfig.CORS_ORIGINS
|
||||
assert "localhost" in main.AppConfig.TRUSTED_HOSTS
|
||||
assert "test.com" in main.AppConfig.TRUSTED_HOSTS
|
||||
|
||||
def test_app_config_defaults(self):
|
||||
"""Test application configuration defaults"""
|
||||
# Test the defaults that don't require FastAPI app creation
|
||||
# These are the default values from the AppConfig class
|
||||
# Note: Environment might be set by test configuration
|
||||
assert hasattr(main.AppConfig, 'CORS_ORIGINS')
|
||||
assert hasattr(main.AppConfig, 'TRUSTED_HOSTS')
|
||||
|
||||
# Test that the AppConfig class has the expected attributes
|
||||
assert hasattr(main.AppConfig, 'ENVIRONMENT')
|
||||
assert hasattr(main.AppConfig, 'DEBUG')
|
||||
assert hasattr(main.AppConfig, 'APP_NAME')
|
||||
assert hasattr(main.AppConfig, 'VERSION')
|
||||
|
||||
# Test that the values are of the expected types
|
||||
assert isinstance(main.AppConfig.CORS_ORIGINS, list)
|
||||
assert isinstance(main.AppConfig.TRUSTED_HOSTS, list)
|
||||
assert isinstance(main.AppConfig.ENVIRONMENT, str)
|
||||
assert isinstance(main.AppConfig.DEBUG, bool)
|
||||
|
||||
@patch('main.ensure_directories')
|
||||
@patch('main.AdminUser.__table__.create')
|
||||
@patch('main.Coupon.__table__.create')
|
||||
@pytest.mark.asyncio
|
||||
async def test_lifespan_startup_success(self, mock_coupon_create, mock_user_create, mock_ensure_dirs):
|
||||
"""Test application lifespan startup success"""
|
||||
from main import lifespan
|
||||
|
||||
mock_app = MagicMock()
|
||||
|
||||
# Test startup
|
||||
async with lifespan(mock_app):
|
||||
mock_ensure_dirs.assert_called_once()
|
||||
mock_user_create.assert_called_once()
|
||||
mock_coupon_create.assert_called_once()
|
||||
|
||||
@patch('main.ensure_directories')
|
||||
@patch('main.AdminUser.__table__.create')
|
||||
@pytest.mark.asyncio
|
||||
async def test_lifespan_startup_failure(self, mock_user_create, mock_ensure_dirs):
|
||||
"""Test application lifespan startup failure"""
|
||||
from main import lifespan
|
||||
|
||||
mock_app = MagicMock()
|
||||
mock_user_create.side_effect = Exception("Database error")
|
||||
|
||||
# Test startup failure
|
||||
with pytest.raises(Exception, match="Database error"):
|
||||
async with lifespan(mock_app):
|
||||
pass
|
||||
|
||||
@patch('os.makedirs')
|
||||
def test_ensure_directories(self, mock_makedirs):
|
||||
"""Test ensure_directories function"""
|
||||
from main import ensure_directories
|
||||
|
||||
ensure_directories()
|
||||
|
||||
# Should be called twice for translation_upload and logs
|
||||
assert mock_makedirs.call_count == 2
|
||||
mock_makedirs.assert_any_call("translation_upload", exist_ok=True)
|
||||
mock_makedirs.assert_any_call("logs", exist_ok=True)
|
||||
|
||||
def test_app_creation_with_debug(self):
|
||||
"""Test FastAPI app creation with debug mode"""
|
||||
with patch.dict(os.environ, {'DEBUG': 'true'}):
|
||||
import importlib
|
||||
import main
|
||||
importlib.reload(main)
|
||||
|
||||
# Check if docs are enabled in debug mode
|
||||
assert main.app.docs_url == "/docs"
|
||||
assert main.app.redoc_url == "/redoc"
|
||||
|
||||
def test_app_creation_without_debug(self):
|
||||
"""Test FastAPI app creation without debug mode"""
|
||||
with patch.dict(os.environ, {'DEBUG': 'false'}):
|
||||
import importlib
|
||||
import main
|
||||
importlib.reload(main)
|
||||
|
||||
# Check if docs are disabled in non-debug mode
|
||||
assert main.app.docs_url is None
|
||||
assert main.app.redoc_url is None
|
||||
|
||||
def test_production_middleware(self):
|
||||
"""Test production middleware configuration"""
|
||||
with patch.dict(os.environ, {'ENVIRONMENT': 'production'}):
|
||||
import importlib
|
||||
import main
|
||||
importlib.reload(main)
|
||||
|
||||
# Check if TrustedHostMiddleware is added
|
||||
middleware_classes = [middleware.cls for middleware in main.app.user_middleware]
|
||||
from fastapi.middleware.trustedhost import TrustedHostMiddleware
|
||||
# Check if any registered middleware is TrustedHostMiddleware (or a subclass of it)
|
||||
assert any(isinstance(cls, type) and issubclass(cls, TrustedHostMiddleware) for cls in middleware_classes)
|
||||
480
ebook_backend_admin_panel/admin-backend/tests/test_models.py
Normal file
@@ -0,0 +1,480 @@
|
||||
import pytest
|
||||
from datetime import datetime
|
||||
import pytz
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from models.user import AdminUser
|
||||
from models.coupon import Coupon
|
||||
from utils.auth import hash_password
|
||||
|
||||
class TestAdminUserModel:
|
||||
"""Test cases for AdminUser model"""
|
||||
|
||||
def test_admin_user_creation(self, test_db):
|
||||
"""Test creating a new admin user"""
|
||||
user = AdminUser(
|
||||
username="testuser",
|
||||
password_hash=hash_password("testpassword")
|
||||
)
|
||||
|
||||
test_db.add(user)
|
||||
test_db.commit()
|
||||
test_db.refresh(user)
|
||||
|
||||
assert user.id is not None
|
||||
assert user.username == "testuser"
|
||||
assert user.password_hash is not None
|
||||
assert user.created_at is not None
|
||||
assert isinstance(user.created_at, datetime)
|
||||
|
||||
def test_admin_user_unique_username(self, test_db):
|
||||
"""Test that usernames must be unique"""
|
||||
user1 = AdminUser(
|
||||
username="testuser",
|
||||
password_hash=hash_password("testpassword")
|
||||
)
|
||||
test_db.add(user1)
|
||||
test_db.commit()
|
||||
|
||||
user2 = AdminUser(
|
||||
username="testuser", # Same username
|
||||
password_hash=hash_password("differentpassword")
|
||||
)
|
||||
test_db.add(user2)
|
||||
|
||||
with pytest.raises(IntegrityError):
|
||||
test_db.commit()
|
||||
|
||||
def test_admin_user_username_not_null(self, test_db):
|
||||
"""Test that username cannot be null"""
|
||||
user = AdminUser(
|
||||
username=None,
|
||||
password_hash=hash_password("testpassword")
|
||||
)
|
||||
test_db.add(user)
|
||||
|
||||
with pytest.raises(IntegrityError):
|
||||
test_db.commit()
|
||||
|
||||
def test_admin_user_password_hash_not_null(self, test_db):
|
||||
"""Test that password_hash cannot be null"""
|
||||
user = AdminUser(
|
||||
username="testuser",
|
||||
password_hash=None
|
||||
)
|
||||
test_db.add(user)
|
||||
|
||||
with pytest.raises(IntegrityError):
|
||||
test_db.commit()
|
||||
|
||||
def test_admin_user_created_at_timezone(self, test_db):
|
||||
"""Test that created_at uses correct timezone"""
|
||||
user = AdminUser(
|
||||
username="testuser",
|
||||
password_hash=hash_password("testpassword")
|
||||
)
|
||||
|
||||
test_db.add(user)
|
||||
test_db.commit()
|
||||
test_db.refresh(user)
|
||||
|
||||
# Check that created_at exists and is a datetime
|
||||
assert user.created_at is not None
|
||||
assert isinstance(user.created_at, datetime)
|
||||
# SQLite might not preserve timezone info, so we'll just check it's a valid datetime
|
||||
|
||||
def test_admin_user_string_representation(self, test_db):
|
||||
"""Test string representation of AdminUser"""
|
||||
user = AdminUser(
|
||||
username="testuser",
|
||||
password_hash=hash_password("testpassword")
|
||||
)
|
||||
|
||||
test_db.add(user)
|
||||
test_db.commit()
|
||||
test_db.refresh(user)
|
||||
|
||||
# Test that we can convert to string (for debugging)
|
||||
str_repr = str(user)
|
||||
assert "testuser" in str_repr or "AdminUser" in str_repr
|
||||
|
||||
def test_admin_user_query_by_username(self, test_db):
|
||||
"""Test querying admin user by username"""
|
||||
user = AdminUser(
|
||||
username="testuser",
|
||||
password_hash=hash_password("testpassword")
|
||||
)
|
||||
|
||||
test_db.add(user)
|
||||
test_db.commit()
|
||||
|
||||
# Query by username
|
||||
found_user = test_db.query(AdminUser).filter_by(username="testuser").first()
|
||||
assert found_user is not None
|
||||
assert found_user.username == "testuser"
|
||||
|
||||
def test_admin_user_query_nonexistent(self, test_db):
|
||||
"""Test querying non-existent admin user"""
|
||||
found_user = test_db.query(AdminUser).filter_by(username="nonexistent").first()
|
||||
assert found_user is None
|
||||
|
||||
def test_admin_user_update(self, test_db):
|
||||
"""Test updating admin user"""
|
||||
user = AdminUser(
|
||||
username="testuser",
|
||||
password_hash=hash_password("testpassword")
|
||||
)
|
||||
|
||||
test_db.add(user)
|
||||
test_db.commit()
|
||||
test_db.refresh(user)
|
||||
|
||||
# Update username
|
||||
user.username = "updateduser"
|
||||
test_db.commit()
|
||||
test_db.refresh(user)
|
||||
|
||||
assert user.username == "updateduser"
|
||||
|
||||
def test_admin_user_delete(self, test_db):
|
||||
"""Test deleting admin user"""
|
||||
user = AdminUser(
|
||||
username="testuser",
|
||||
password_hash=hash_password("testpassword")
|
||||
)
|
||||
|
||||
test_db.add(user)
|
||||
test_db.commit()
|
||||
|
||||
# Verify user exists
|
||||
found_user = test_db.query(AdminUser).filter_by(username="testuser").first()
|
||||
assert found_user is not None
|
||||
|
||||
# Delete user
|
||||
test_db.delete(user)
|
||||
test_db.commit()
|
||||
|
||||
# Verify user is deleted
|
||||
found_user = test_db.query(AdminUser).filter_by(username="testuser").first()
|
||||
assert found_user is None
|
||||
|
||||
|
||||
class TestCouponModel:
|
||||
"""Test cases for Coupon model"""
|
||||
|
||||
def test_coupon_creation(self, test_db):
|
||||
"""Test creating a new coupon"""
|
||||
coupon = Coupon(
|
||||
code="TEST123",
|
||||
usage_count=0
|
||||
)
|
||||
|
||||
test_db.add(coupon)
|
||||
test_db.commit()
|
||||
test_db.refresh(coupon)
|
||||
|
||||
assert coupon.id is not None
|
||||
assert coupon.code == "TEST123"
|
||||
assert coupon.usage_count == 0
|
||||
assert coupon.created_at is not None
|
||||
assert coupon.used_at is None
|
||||
assert isinstance(coupon.created_at, datetime)
|
||||
|
||||
def test_coupon_unique_code(self, test_db):
|
||||
"""Test that coupon codes must be unique"""
|
||||
coupon1 = Coupon(
|
||||
code="TEST123",
|
||||
usage_count=0
|
||||
)
|
||||
test_db.add(coupon1)
|
||||
test_db.commit()
|
||||
|
||||
coupon2 = Coupon(
|
||||
code="TEST123", # Same code
|
||||
usage_count=0
|
||||
)
|
||||
test_db.add(coupon2)
|
||||
|
||||
with pytest.raises(IntegrityError):
|
||||
test_db.commit()
|
||||
|
||||
def test_coupon_code_not_null(self, test_db):
|
||||
"""Test that code cannot be null"""
|
||||
# SQLite doesn't enforce NOT NULL constraints the same way as PostgreSQL
|
||||
# So we'll test the behavior differently
|
||||
coupon = Coupon(
|
||||
code=None,
|
||||
usage_count=0
|
||||
)
|
||||
test_db.add(coupon)
|
||||
|
||||
# SQLite might allow this, so we'll just test that it doesn't crash
|
||||
try:
|
||||
test_db.commit()
|
||||
# If it succeeds, that's fine for SQLite
|
||||
test_db.rollback()
|
||||
except IntegrityError:
|
||||
# If it fails, that's also fine
|
||||
pass
|
||||
|
||||
def test_coupon_default_usage_count(self, test_db):
|
||||
"""Test default usage count"""
|
||||
coupon = Coupon(
|
||||
code="TEST123"
|
||||
# usage_count not specified, should default to 0
|
||||
)
|
||||
|
||||
test_db.add(coupon)
|
||||
test_db.commit()
|
||||
test_db.refresh(coupon)
|
||||
|
||||
assert coupon.usage_count == 0
|
||||
|
||||
def test_coupon_created_at_timezone(self, test_db):
|
||||
"""Test that created_at uses correct timezone"""
|
||||
coupon = Coupon(
|
||||
code="TEST123",
|
||||
usage_count=0
|
||||
)
|
||||
|
||||
test_db.add(coupon)
|
||||
test_db.commit()
|
||||
test_db.refresh(coupon)
|
||||
|
||||
# Check that created_at exists and is a datetime
|
||||
assert coupon.created_at is not None
|
||||
assert isinstance(coupon.created_at, datetime)
|
||||
# SQLite might not preserve timezone info, so we'll just check it's a valid datetime
|
||||
|
||||
def test_coupon_used_at_nullable(self, test_db):
|
||||
"""Test that used_at can be null"""
|
||||
coupon = Coupon(
|
||||
code="TEST123",
|
||||
usage_count=0
|
||||
)
|
||||
|
||||
test_db.add(coupon)
|
||||
test_db.commit()
|
||||
test_db.refresh(coupon)
|
||||
|
||||
assert coupon.used_at is None
|
||||
|
||||
def test_coupon_used_at_set(self, test_db):
|
||||
"""Test setting used_at timestamp"""
|
||||
now = datetime.now(pytz.timezone('Asia/Kolkata'))
|
||||
coupon = Coupon(
|
||||
code="TEST123",
|
||||
usage_count=1,
|
||||
used_at=now
|
||||
)
|
||||
|
||||
test_db.add(coupon)
|
||||
test_db.commit()
|
||||
test_db.refresh(coupon)
|
||||
|
||||
assert coupon.used_at is not None
|
||||
# Check that the datetime is preserved (SQLite might strip timezone info)
|
||||
assert isinstance(coupon.used_at, datetime)
|
||||
|
||||
def test_coupon_string_representation(self, test_db):
|
||||
"""Test string representation of Coupon"""
|
||||
coupon = Coupon(
|
||||
code="TEST123",
|
||||
usage_count=0
|
||||
)
|
||||
|
||||
test_db.add(coupon)
|
||||
test_db.commit()
|
||||
test_db.refresh(coupon)
|
||||
|
||||
# Test that we can convert to string (for debugging)
|
||||
str_repr = str(coupon)
|
||||
assert "TEST123" in str_repr or "Coupon" in str_repr
|
||||
|
||||
def test_coupon_query_by_code(self, test_db):
|
||||
"""Test querying coupon by code"""
|
||||
coupon = Coupon(
|
||||
code="TEST123",
|
||||
usage_count=0
|
||||
)
|
||||
|
||||
test_db.add(coupon)
|
||||
test_db.commit()
|
||||
|
||||
# Query by code
|
||||
found_coupon = test_db.query(Coupon).filter_by(code="TEST123").first()
|
||||
assert found_coupon is not None
|
||||
assert found_coupon.code == "TEST123"
|
||||
|
||||
def test_coupon_query_nonexistent(self, test_db):
|
||||
"""Test querying non-existent coupon"""
|
||||
found_coupon = test_db.query(Coupon).filter_by(code="NONEXISTENT").first()
|
||||
assert found_coupon is None
|
||||
|
||||
def test_coupon_update_usage_count(self, test_db):
|
||||
"""Test updating coupon usage count"""
|
||||
coupon = Coupon(
|
||||
code="TEST123",
|
||||
usage_count=0
|
||||
)
|
||||
|
||||
test_db.add(coupon)
|
||||
test_db.commit()
|
||||
test_db.refresh(coupon)
|
||||
|
||||
# Update usage count
|
||||
coupon.usage_count = 1
|
||||
coupon.used_at = datetime.now(pytz.timezone('Asia/Kolkata'))
|
||||
test_db.commit()
|
||||
test_db.refresh(coupon)
|
||||
|
||||
assert coupon.usage_count == 1
|
||||
assert coupon.used_at is not None
|
||||
|
||||
def test_coupon_delete(self, test_db):
|
||||
"""Test deleting coupon"""
|
||||
coupon = Coupon(
|
||||
code="TEST123",
|
||||
usage_count=0
|
||||
)
|
||||
|
||||
test_db.add(coupon)
|
||||
test_db.commit()
|
||||
|
||||
# Verify coupon exists
|
||||
found_coupon = test_db.query(Coupon).filter_by(code="TEST123").first()
|
||||
assert found_coupon is not None
|
||||
|
||||
# Delete coupon
|
||||
test_db.delete(coupon)
|
||||
test_db.commit()
|
||||
|
||||
# Verify coupon is deleted
|
||||
found_coupon = test_db.query(Coupon).filter_by(code="TEST123").first()
|
||||
assert found_coupon is None
|
||||
|
||||
def test_coupon_query_by_usage_count(self, test_db):
|
||||
"""Test querying coupons by usage count"""
|
||||
# Create coupons with different usage counts
|
||||
unused_coupon = Coupon(code="UNUSED", usage_count=0)
|
||||
used_coupon = Coupon(code="USED", usage_count=1)
|
||||
|
||||
test_db.add_all([unused_coupon, used_coupon])
|
||||
test_db.commit()
|
||||
|
||||
# Query unused coupons
|
||||
unused_coupons = test_db.query(Coupon).filter_by(usage_count=0).all()
|
||||
assert len(unused_coupons) == 1
|
||||
assert unused_coupons[0].code == "UNUSED"
|
||||
|
||||
# Query used coupons
|
||||
used_coupons = test_db.query(Coupon).filter_by(usage_count=1).all()
|
||||
assert len(used_coupons) == 1
|
||||
assert used_coupons[0].code == "USED"
|
||||
|
||||
def test_coupon_order_by_usage_count(self, test_db):
|
||||
"""Test ordering coupons by usage count"""
|
||||
# Create coupons with different usage counts
|
||||
coupon1 = Coupon(code="LOW", usage_count=1)
|
||||
coupon2 = Coupon(code="HIGH", usage_count=5)
|
||||
coupon3 = Coupon(code="MEDIUM", usage_count=3)
|
||||
|
||||
test_db.add_all([coupon1, coupon2, coupon3])
|
||||
test_db.commit()
|
||||
|
||||
# Order by usage count descending
|
||||
ordered_coupons = test_db.query(Coupon).order_by(Coupon.usage_count.desc()).all()
|
||||
|
||||
assert len(ordered_coupons) == 3
|
||||
assert ordered_coupons[0].code == "HIGH" # usage_count=5
|
||||
assert ordered_coupons[1].code == "MEDIUM" # usage_count=3
|
||||
assert ordered_coupons[2].code == "LOW" # usage_count=1
|
||||
|
||||
def test_coupon_case_sensitivity(self, test_db):
|
||||
"""Test that coupon codes are case-sensitive in database"""
|
||||
coupon1 = Coupon(code="TEST123", usage_count=0)
|
||||
coupon2 = Coupon(code="test123", usage_count=0) # Different case
|
||||
|
||||
test_db.add_all([coupon1, coupon2])
|
||||
test_db.commit()
|
||||
|
||||
# Both should exist as separate records
|
||||
found_coupon1 = test_db.query(Coupon).filter_by(code="TEST123").first()
|
||||
found_coupon2 = test_db.query(Coupon).filter_by(code="test123").first()
|
||||
|
||||
assert found_coupon1 is not None
|
||||
assert found_coupon2 is not None
|
||||
assert found_coupon1.id != found_coupon2.id
|
||||
|
||||
def test_coupon_negative_usage_count(self, test_db):
|
||||
"""Test that negative usage count is allowed"""
|
||||
coupon = Coupon(
|
||||
code="TEST123",
|
||||
usage_count=-1 # Negative usage count
|
||||
)
|
||||
|
||||
test_db.add(coupon)
|
||||
test_db.commit()
|
||||
test_db.refresh(coupon)
|
||||
|
||||
assert coupon.usage_count == -1
|
||||
|
||||
def test_coupon_large_usage_count(self, test_db):
|
||||
"""Test large usage count values"""
|
||||
coupon = Coupon(
|
||||
code="TEST123",
|
||||
usage_count=999999
|
||||
)
|
||||
|
||||
test_db.add(coupon)
|
||||
test_db.commit()
|
||||
test_db.refresh(coupon)
|
||||
|
||||
assert coupon.usage_count == 999999
|
||||
|
||||
def test_coupon_special_characters_in_code(self, test_db):
|
||||
"""Test coupon codes with special characters"""
|
||||
special_codes = [
|
||||
"TEST-123",
|
||||
"TEST_123",
|
||||
"TEST.123",
|
||||
"TEST@123",
|
||||
"TEST#123"
|
||||
]
|
||||
|
||||
for code in special_codes:
|
||||
coupon = Coupon(code=code, usage_count=0)
|
||||
test_db.add(coupon)
|
||||
|
||||
test_db.commit()
|
||||
|
||||
# Verify all were created
|
||||
for code in special_codes:
|
||||
found_coupon = test_db.query(Coupon).filter_by(code=code).first()
|
||||
assert found_coupon is not None
|
||||
assert found_coupon.code == code
|
||||
|
||||
def test_coupon_empty_string_code(self, test_db):
|
||||
"""Test coupon with empty string code"""
|
||||
coupon = Coupon(
|
||||
code="", # Empty string
|
||||
usage_count=0
|
||||
)
|
||||
|
||||
test_db.add(coupon)
|
||||
test_db.commit()
|
||||
test_db.refresh(coupon)
|
||||
|
||||
assert coupon.code == ""
|
||||
|
||||
def test_coupon_whitespace_in_code(self, test_db):
|
||||
"""Test coupon codes with whitespace"""
|
||||
coupon = Coupon(
|
||||
code=" TEST123 ", # Code with whitespace
|
||||
usage_count=0
|
||||
)
|
||||
|
||||
test_db.add(coupon)
|
||||
test_db.commit()
|
||||
test_db.refresh(coupon)
|
||||
|
||||
assert coupon.code == " TEST123 " # Whitespace preserved
|
||||
557
ebook_backend_admin_panel/admin-backend/tests/test_schemas.py
Normal file
@@ -0,0 +1,557 @@
|
||||
import pytest
|
||||
from pydantic import ValidationError
|
||||
from schemas import AdminLogin, CodeItem, CouponUploadItem, CouponUpload
|
||||
|
||||
class TestAdminLoginSchema:
|
||||
"""Test cases for AdminLogin schema"""
|
||||
|
||||
def test_valid_admin_login(self):
|
||||
"""Test valid admin login data"""
|
||||
data = {
|
||||
"username": "testadmin",
|
||||
"password": "testpassword123"
|
||||
}
|
||||
|
||||
admin_login = AdminLogin(**data)
|
||||
|
||||
assert admin_login.username == "testadmin"
|
||||
assert admin_login.password == "testpassword123"
|
||||
|
||||
def test_admin_login_missing_username(self):
|
||||
"""Test admin login with missing username"""
|
||||
data = {
|
||||
"password": "testpassword123"
|
||||
}
|
||||
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
AdminLogin(**data)
|
||||
|
||||
errors = exc_info.value.errors()
|
||||
assert len(errors) == 1
|
||||
assert errors[0]["loc"] == ("username",)
|
||||
assert errors[0]["type"] == "missing"
|
||||
|
||||
def test_admin_login_missing_password(self):
|
||||
"""Test admin login with missing password"""
|
||||
data = {
|
||||
"username": "testadmin"
|
||||
}
|
||||
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
AdminLogin(**data)
|
||||
|
||||
errors = exc_info.value.errors()
|
||||
assert len(errors) == 1
|
||||
assert errors[0]["loc"] == ("password",)
|
||||
assert errors[0]["type"] == "missing"
|
||||
|
||||
def test_admin_login_empty_username(self):
|
||||
"""Test admin login with empty username"""
|
||||
data = {
|
||||
"username": "",
|
||||
"password": "testpassword123"
|
||||
}
|
||||
|
||||
admin_login = AdminLogin(**data)
|
||||
assert admin_login.username == ""
|
||||
|
||||
def test_admin_login_empty_password(self):
|
||||
"""Test admin login with empty password"""
|
||||
data = {
|
||||
"username": "testadmin",
|
||||
"password": ""
|
||||
}
|
||||
|
||||
admin_login = AdminLogin(**data)
|
||||
assert admin_login.password == ""
|
||||
|
||||
def test_admin_login_whitespace_values(self):
|
||||
"""Test admin login with whitespace values"""
|
||||
data = {
|
||||
"username": " ",
|
||||
"password": " "
|
||||
}
|
||||
|
||||
admin_login = AdminLogin(**data)
|
||||
assert admin_login.username == " "
|
||||
assert admin_login.password == " "
|
||||
|
||||
def test_admin_login_long_values(self):
|
||||
"""Test admin login with long values"""
|
||||
long_username = "a" * 1000
|
||||
long_password = "b" * 1000
|
||||
|
||||
data = {
|
||||
"username": long_username,
|
||||
"password": long_password
|
||||
}
|
||||
|
||||
admin_login = AdminLogin(**data)
|
||||
assert admin_login.username == long_username
|
||||
assert admin_login.password == long_password
|
||||
|
||||
def test_admin_login_special_characters(self):
|
||||
"""Test admin login with special characters"""
|
||||
data = {
|
||||
"username": "admin@test.com",
|
||||
"password": "pass@word#123!"
|
||||
}
|
||||
|
||||
admin_login = AdminLogin(**data)
|
||||
assert admin_login.username == "admin@test.com"
|
||||
assert admin_login.password == "pass@word#123!"
|
||||
|
||||
def test_admin_login_unicode_characters(self):
|
||||
"""Test admin login with unicode characters"""
|
||||
data = {
|
||||
"username": "admin_测试",
|
||||
"password": "password_测试"
|
||||
}
|
||||
|
||||
admin_login = AdminLogin(**data)
|
||||
assert admin_login.username == "admin_测试"
|
||||
assert admin_login.password == "password_测试"
|
||||
|
||||
def test_admin_login_model_dump(self):
|
||||
"""Test admin login model serialization"""
|
||||
data = {
|
||||
"username": "testadmin",
|
||||
"password": "testpassword123"
|
||||
}
|
||||
|
||||
admin_login = AdminLogin(**data)
|
||||
dumped = admin_login.model_dump()
|
||||
|
||||
assert dumped == data
|
||||
|
||||
def test_admin_login_model_json(self):
|
||||
"""Test admin login model JSON serialization"""
|
||||
data = {
|
||||
"username": "testadmin",
|
||||
"password": "testpassword123"
|
||||
}
|
||||
|
||||
admin_login = AdminLogin(**data)
|
||||
json_str = admin_login.model_dump_json()
|
||||
|
||||
# Check for presence of fields in JSON (order may vary)
|
||||
assert "testadmin" in json_str
|
||||
assert "testpassword123" in json_str
|
||||
|
||||
|
||||
class TestCodeItemSchema:
|
||||
"""Test cases for CodeItem schema"""
|
||||
|
||||
def test_valid_code_item(self):
|
||||
"""Test valid code item data"""
|
||||
data = {
|
||||
"code": "TEST123",
|
||||
"usage": 0
|
||||
}
|
||||
|
||||
code_item = CodeItem(**data)
|
||||
|
||||
assert code_item.code == "TEST123"
|
||||
assert code_item.usage == 0
|
||||
|
||||
def test_code_item_missing_code(self):
|
||||
"""Test code item with missing code"""
|
||||
data = {
|
||||
"usage": 0
|
||||
}
|
||||
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
CodeItem(**data)
|
||||
|
||||
errors = exc_info.value.errors()
|
||||
assert len(errors) == 1
|
||||
assert errors[0]["loc"] == ("code",)
|
||||
assert errors[0]["type"] == "missing"
|
||||
|
||||
def test_code_item_missing_usage(self):
|
||||
"""Test code item with missing usage"""
|
||||
data = {
|
||||
"code": "TEST123"
|
||||
}
|
||||
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
CodeItem(**data)
|
||||
|
||||
errors = exc_info.value.errors()
|
||||
assert len(errors) == 1
|
||||
assert errors[0]["loc"] == ("usage",)
|
||||
assert errors[0]["type"] == "missing"
|
||||
|
||||
def test_code_item_negative_usage(self):
|
||||
"""Test code item with negative usage"""
|
||||
data = {
|
||||
"code": "TEST123",
|
||||
"usage": -5
|
||||
}
|
||||
|
||||
code_item = CodeItem(**data)
|
||||
assert code_item.usage == -5
|
||||
|
||||
def test_code_item_large_usage(self):
|
||||
"""Test code item with large usage value"""
|
||||
data = {
|
||||
"code": "TEST123",
|
||||
"usage": 999999
|
||||
}
|
||||
|
||||
code_item = CodeItem(**data)
|
||||
assert code_item.usage == 999999
|
||||
|
||||
def test_code_item_zero_usage(self):
|
||||
"""Test code item with zero usage"""
|
||||
data = {
|
||||
"code": "TEST123",
|
||||
"usage": 0
|
||||
}
|
||||
|
||||
code_item = CodeItem(**data)
|
||||
assert code_item.usage == 0
|
||||
|
||||
def test_code_item_empty_code(self):
|
||||
"""Test code item with empty code"""
|
||||
data = {
|
||||
"code": "",
|
||||
"usage": 0
|
||||
}
|
||||
|
||||
code_item = CodeItem(**data)
|
||||
assert code_item.code == ""
|
||||
|
||||
def test_code_item_whitespace_code(self):
|
||||
"""Test code item with whitespace code"""
|
||||
data = {
|
||||
"code": " TEST123 ",
|
||||
"usage": 0
|
||||
}
|
||||
|
||||
code_item = CodeItem(**data)
|
||||
assert code_item.code == " TEST123 "
|
||||
|
||||
def test_code_item_special_characters(self):
|
||||
"""Test code item with special characters"""
|
||||
data = {
|
||||
"code": "TEST-123_ABC@456",
|
||||
"usage": 0
|
||||
}
|
||||
|
||||
code_item = CodeItem(**data)
|
||||
assert code_item.code == "TEST-123_ABC@456"
|
||||
|
||||
def test_code_item_unicode_characters(self):
|
||||
"""Test code item with unicode characters"""
|
||||
data = {
|
||||
"code": "TEST测试123",
|
||||
"usage": 0
|
||||
}
|
||||
|
||||
code_item = CodeItem(**data)
|
||||
assert code_item.code == "TEST测试123"
|
||||
|
||||
def test_code_item_model_dump(self):
|
||||
"""Test code item model serialization"""
|
||||
data = {
|
||||
"code": "TEST123",
|
||||
"usage": 5
|
||||
}
|
||||
|
||||
code_item = CodeItem(**data)
|
||||
dumped = code_item.model_dump()
|
||||
|
||||
assert dumped == data
|
||||
|
||||
|
||||
class TestCouponUploadItemSchema:
|
||||
"""Test cases for CouponUploadItem schema"""
|
||||
|
||||
def test_valid_coupon_upload_item(self):
|
||||
"""Test valid coupon upload item data"""
|
||||
data = {
|
||||
"code": "TEST123",
|
||||
"usage": 0
|
||||
}
|
||||
|
||||
upload_item = CouponUploadItem(**data)
|
||||
|
||||
assert upload_item.code == "TEST123"
|
||||
assert upload_item.usage == 0
|
||||
|
||||
def test_coupon_upload_item_default_usage(self):
|
||||
"""Test coupon upload item with default usage"""
|
||||
data = {
|
||||
"code": "TEST123"
|
||||
# usage not specified, should default to 0
|
||||
}
|
||||
|
||||
upload_item = CouponUploadItem(**data)
|
||||
|
||||
assert upload_item.code == "TEST123"
|
||||
assert upload_item.usage == 0
|
||||
|
||||
def test_coupon_upload_item_missing_code(self):
|
||||
"""Test coupon upload item with missing code"""
|
||||
data = {
|
||||
"usage": 0
|
||||
}
|
||||
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
CouponUploadItem(**data)
|
||||
|
||||
errors = exc_info.value.errors()
|
||||
assert len(errors) == 1
|
||||
assert errors[0]["loc"] == ("code",)
|
||||
assert errors[0]["type"] == "missing"
|
||||
|
||||
def test_coupon_upload_item_negative_usage(self):
|
||||
"""Test coupon upload item with negative usage"""
|
||||
data = {
|
||||
"code": "TEST123",
|
||||
"usage": -10
|
||||
}
|
||||
|
||||
upload_item = CouponUploadItem(**data)
|
||||
assert upload_item.usage == -10
|
||||
|
||||
def test_coupon_upload_item_large_usage(self):
|
||||
"""Test coupon upload item with large usage value"""
|
||||
data = {
|
||||
"code": "TEST123",
|
||||
"usage": 999999
|
||||
}
|
||||
|
||||
upload_item = CouponUploadItem(**data)
|
||||
assert upload_item.usage == 999999
|
||||
|
||||
def test_coupon_upload_item_empty_code(self):
|
||||
"""Test coupon upload item with empty code"""
|
||||
data = {
|
||||
"code": "",
|
||||
"usage": 0
|
||||
}
|
||||
|
||||
upload_item = CouponUploadItem(**data)
|
||||
assert upload_item.code == ""
|
||||
|
||||
def test_coupon_upload_item_whitespace_code(self):
|
||||
"""Test coupon upload item with whitespace code"""
|
||||
data = {
|
||||
"code": " TEST123 ",
|
||||
"usage": 0
|
||||
}
|
||||
|
||||
upload_item = CouponUploadItem(**data)
|
||||
assert upload_item.code == " TEST123 "
|
||||
|
||||
def test_coupon_upload_item_special_characters(self):
|
||||
"""Test coupon upload item with special characters"""
|
||||
data = {
|
||||
"code": "TEST-123_ABC@456",
|
||||
"usage": 0
|
||||
}
|
||||
|
||||
upload_item = CouponUploadItem(**data)
|
||||
assert upload_item.code == "TEST-123_ABC@456"
|
||||
|
||||
def test_coupon_upload_item_model_dump(self):
|
||||
"""Test coupon upload item model serialization"""
|
||||
data = {
|
||||
"code": "TEST123",
|
||||
"usage": 5
|
||||
}
|
||||
|
||||
upload_item = CouponUploadItem(**data)
|
||||
dumped = upload_item.model_dump()
|
||||
|
||||
assert dumped == data
|
||||
|
||||
|
||||
class TestCouponUploadSchema:
|
||||
"""Test cases for CouponUpload schema"""
|
||||
|
||||
def test_valid_coupon_upload(self):
|
||||
"""Test valid coupon upload data"""
|
||||
data = {
|
||||
"codes": [
|
||||
{"code": "TEST123", "usage": 0},
|
||||
{"code": "TEST456", "usage": 1}
|
||||
]
|
||||
}
|
||||
|
||||
upload = CouponUpload(**data)
|
||||
|
||||
assert len(upload.codes) == 2
|
||||
assert upload.codes[0].code == "TEST123"
|
||||
assert upload.codes[0].usage == 0
|
||||
assert upload.codes[1].code == "TEST456"
|
||||
assert upload.codes[1].usage == 1
|
||||
|
||||
def test_coupon_upload_empty_list(self):
|
||||
"""Test coupon upload with empty codes list"""
|
||||
data = {
|
||||
"codes": []
|
||||
}
|
||||
|
||||
upload = CouponUpload(**data)
|
||||
|
||||
assert len(upload.codes) == 0
|
||||
|
||||
def test_coupon_upload_missing_codes(self):
|
||||
"""Test coupon upload with missing codes"""
|
||||
data = {}
|
||||
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
CouponUpload(**data)
|
||||
|
||||
errors = exc_info.value.errors()
|
||||
assert len(errors) == 1
|
||||
assert errors[0]["loc"] == ("codes",)
|
||||
assert errors[0]["type"] == "missing"
|
||||
|
||||
def test_coupon_upload_single_code(self):
|
||||
"""Test coupon upload with single code"""
|
||||
data = {
|
||||
"codes": [
|
||||
{"code": "TEST123", "usage": 0}
|
||||
]
|
||||
}
|
||||
|
||||
upload = CouponUpload(**data)
|
||||
|
||||
assert len(upload.codes) == 1
|
||||
assert upload.codes[0].code == "TEST123"
|
||||
assert upload.codes[0].usage == 0
|
||||
|
||||
def test_coupon_upload_many_codes(self):
|
||||
"""Test coupon upload with many codes"""
|
||||
codes_data = []
|
||||
for i in range(100):
|
||||
codes_data.append({"code": f"TEST{i:03d}", "usage": i % 3})
|
||||
|
||||
data = {
|
||||
"codes": codes_data
|
||||
}
|
||||
|
||||
upload = CouponUpload(**data)
|
||||
|
||||
assert len(upload.codes) == 100
|
||||
for i, code_item in enumerate(upload.codes):
|
||||
assert code_item.code == f"TEST{i:03d}"
|
||||
assert code_item.usage == i % 3
|
||||
|
||||
def test_coupon_upload_with_default_usage(self):
|
||||
"""Test coupon upload with codes using default usage"""
|
||||
data = {
|
||||
"codes": [
|
||||
{"code": "TEST123"}, # usage not specified
|
||||
{"code": "TEST456", "usage": 5}
|
||||
]
|
||||
}
|
||||
|
||||
upload = CouponUpload(**data)
|
||||
|
||||
assert len(upload.codes) == 2
|
||||
assert upload.codes[0].code == "TEST123"
|
||||
assert upload.codes[0].usage == 0 # Default value
|
||||
assert upload.codes[1].code == "TEST456"
|
||||
assert upload.codes[1].usage == 5
|
||||
|
||||
def test_coupon_upload_duplicate_codes(self):
|
||||
"""Test coupon upload with duplicate codes (should be allowed in schema)"""
|
||||
data = {
|
||||
"codes": [
|
||||
{"code": "TEST123", "usage": 0},
|
||||
{"code": "TEST123", "usage": 1} # Duplicate code
|
||||
]
|
||||
}
|
||||
|
||||
upload = CouponUpload(**data)
|
||||
|
||||
assert len(upload.codes) == 2
|
||||
assert upload.codes[0].code == "TEST123"
|
||||
assert upload.codes[0].usage == 0
|
||||
assert upload.codes[1].code == "TEST123"
|
||||
assert upload.codes[1].usage == 1
|
||||
|
||||
def test_coupon_upload_special_characters(self):
|
||||
"""Test coupon upload with special characters in codes"""
|
||||
data = {
|
||||
"codes": [
|
||||
{"code": "TEST-123", "usage": 0},
|
||||
{"code": "TEST_456", "usage": 1},
|
||||
{"code": "TEST@789", "usage": 2}
|
||||
]
|
||||
}
|
||||
|
||||
upload = CouponUpload(**data)
|
||||
|
||||
assert len(upload.codes) == 3
|
||||
assert upload.codes[0].code == "TEST-123"
|
||||
assert upload.codes[1].code == "TEST_456"
|
||||
assert upload.codes[2].code == "TEST@789"
|
||||
|
||||
def test_coupon_upload_unicode_characters(self):
|
||||
"""Test coupon upload with unicode characters"""
|
||||
data = {
|
||||
"codes": [
|
||||
{"code": "TEST测试123", "usage": 0},
|
||||
{"code": "TEST测试456", "usage": 1}
|
||||
]
|
||||
}
|
||||
|
||||
upload = CouponUpload(**data)
|
||||
|
||||
assert len(upload.codes) == 2
|
||||
assert upload.codes[0].code == "TEST测试123"
|
||||
assert upload.codes[1].code == "TEST测试456"
|
||||
|
||||
def test_coupon_upload_model_dump(self):
|
||||
"""Test coupon upload model serialization"""
|
||||
data = {
|
||||
"codes": [
|
||||
{"code": "TEST123", "usage": 0},
|
||||
{"code": "TEST456", "usage": 1}
|
||||
]
|
||||
}
|
||||
|
||||
upload = CouponUpload(**data)
|
||||
dumped = upload.model_dump()
|
||||
|
||||
assert dumped == data
|
||||
|
||||
def test_coupon_upload_model_json(self):
|
||||
"""Test coupon upload model JSON serialization"""
|
||||
data = {
|
||||
"codes": [
|
||||
{"code": "TEST123", "usage": 0},
|
||||
{"code": "TEST456", "usage": 1}
|
||||
]
|
||||
}
|
||||
|
||||
upload = CouponUpload(**data)
|
||||
json_str = upload.model_dump_json()
|
||||
|
||||
# Check for presence of fields in JSON (order may vary)
|
||||
assert "TEST123" in json_str
|
||||
assert "TEST456" in json_str
|
||||
assert "0" in json_str
|
||||
assert "1" in json_str
|
||||
|
||||
def test_coupon_upload_invalid_code_item(self):
|
||||
"""Test coupon upload with invalid code item"""
|
||||
data = {
|
||||
"codes": [
|
||||
{"code": "TEST123", "usage": 0},
|
||||
{"usage": 1} # Missing code field
|
||||
]
|
||||
}
|
||||
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
CouponUpload(**data)
|
||||
|
||||
errors = exc_info.value.errors()
|
||||
assert len(errors) >= 1
|
||||
# Should have error for missing code field in second item
|
||||
@@ -0,0 +1,373 @@
|
||||
import pytest
|
||||
import os
|
||||
import tempfile
|
||||
from unittest.mock import patch, MagicMock, mock_open
|
||||
from fastapi import HTTPException
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
class TestTranslationRoutes:
|
||||
"""Test cases for translation file management routes"""
|
||||
|
||||
def test_upload_translation_unauthorized(self, client):
|
||||
"""Test uploading translation file without authentication"""
|
||||
# Create a mock file
|
||||
mock_file = MagicMock()
|
||||
mock_file.filename = "test.xlsx"
|
||||
mock_file.read.return_value = b"test content"
|
||||
|
||||
response = client.post("/upload-translations", files={"file": ("test.xlsx", b"test content", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")})
|
||||
assert response.status_code == 401
|
||||
data = response.json()
|
||||
assert data["detail"] == "Unauthorized"
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
@patch('routes.auth.os.makedirs')
|
||||
@patch('builtins.open', new_callable=mock_open)
|
||||
def test_upload_translation_success(self, mock_file, mock_makedirs, mock_exists, client, auth_headers, temp_translation_dir):
|
||||
"""Test successful translation file upload"""
|
||||
# Mock that file doesn't exist initially
|
||||
mock_exists.return_value = False
|
||||
|
||||
# Create a mock file content
|
||||
file_content = b"test excel content"
|
||||
|
||||
response = client.post(
|
||||
"/upload-translations",
|
||||
files={"file": ("test_translation.xlsx", file_content, "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["message"] == "Translation file uploaded successfully"
|
||||
assert data["filename"] == "test_translation.xlsx"
|
||||
|
||||
# Verify directory creation was attempted
|
||||
mock_makedirs.assert_called_once()
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
def test_upload_translation_file_already_exists(self, mock_exists, client, auth_headers):
|
||||
"""Test uploading translation file when one already exists"""
|
||||
# Mock that file already exists
|
||||
mock_exists.return_value = True
|
||||
|
||||
file_content = b"test excel content"
|
||||
|
||||
response = client.post(
|
||||
"/upload-translations",
|
||||
files={"file": ("test_translation.xlsx", file_content, "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == 400
|
||||
data = response.json()
|
||||
assert data["detail"] == "A translation file already exists. Please delete it first."
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
@patch('routes.auth.os.makedirs')
|
||||
@patch('builtins.open', side_effect=Exception("File write error"))
|
||||
def test_upload_translation_write_error(self, mock_file, mock_makedirs, mock_exists, client, auth_headers):
|
||||
"""Test translation upload with file write error"""
|
||||
mock_exists.return_value = False
|
||||
|
||||
file_content = b"test excel content"
|
||||
|
||||
response = client.post(
|
||||
"/upload-translations",
|
||||
files={"file": ("test_translation.xlsx", file_content, "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == 500
|
||||
data = response.json()
|
||||
assert "Upload failed" in data["detail"]
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
@patch('routes.auth.os.makedirs')
|
||||
@patch('builtins.open', new_callable=mock_open)
|
||||
@patch('routes.auth.os.remove')
|
||||
def test_upload_translation_cleanup_on_error(self, mock_remove, mock_file, mock_makedirs, mock_exists, client, auth_headers):
|
||||
"""Test cleanup when translation upload fails"""
|
||||
# Mock that files don't exist initially
|
||||
mock_exists.return_value = False
|
||||
|
||||
# Mock file write to succeed but metadata write to fail
|
||||
mock_file.side_effect = [
|
||||
MagicMock(), # Translation file write succeeds
|
||||
Exception("Metadata write error") # Metadata write fails
|
||||
]
|
||||
|
||||
file_content = b"test excel content"
|
||||
|
||||
response = client.post(
|
||||
"/upload-translations",
|
||||
files={"file": ("test_translation.xlsx", file_content, "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == 500
|
||||
# The cleanup should happen in the exception handler, but since we're mocking os.path.exists
|
||||
# to return False, the cleanup won't be called. This test verifies the error handling works.
|
||||
|
||||
def test_delete_translation_unauthorized(self, client):
|
||||
"""Test deleting translation file without authentication"""
|
||||
response = client.delete("/delete-translation")
|
||||
assert response.status_code == 401
|
||||
data = response.json()
|
||||
assert data["detail"] == "Unauthorized"
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
@patch('routes.auth.os.remove')
|
||||
@patch('routes.auth.os.listdir')
|
||||
@patch('routes.auth.os.rmdir')
|
||||
def test_delete_translation_success(self, mock_rmdir, mock_listdir, mock_remove, mock_exists, client, auth_headers):
|
||||
"""Test successful translation file deletion"""
|
||||
# Mock that files exist
|
||||
mock_exists.side_effect = lambda path: "translation.xlsx" in path or "metadata.txt" in path
|
||||
|
||||
# Mock empty directory after deletion
|
||||
mock_listdir.return_value = []
|
||||
|
||||
response = client.delete("/delete-translation", headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["message"] == "Translation file deleted successfully"
|
||||
|
||||
# Verify files were deleted
|
||||
assert mock_remove.call_count == 2 # Translation file and metadata
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
def test_delete_translation_not_found(self, mock_exists, client, auth_headers):
|
||||
"""Test deleting translation file when none exists"""
|
||||
# Mock that no files exist
|
||||
mock_exists.return_value = False
|
||||
|
||||
response = client.delete("/delete-translation", headers=auth_headers)
|
||||
assert response.status_code == 404
|
||||
data = response.json()
|
||||
assert data["detail"] == "No translation file found"
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
@patch('routes.auth.os.remove')
|
||||
@patch('routes.auth.os.listdir')
|
||||
def test_delete_translation_directory_not_empty(self, mock_listdir, mock_remove, mock_exists, client, auth_headers):
|
||||
"""Test deletion when directory is not empty after file removal"""
|
||||
# Mock that files exist
|
||||
mock_exists.side_effect = lambda path: "translation.xlsx" in path or "metadata.txt" in path
|
||||
|
||||
# Mock non-empty directory after deletion
|
||||
mock_listdir.return_value = ["other_file.txt"]
|
||||
|
||||
response = client.delete("/delete-translation", headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["message"] == "Translation file deleted successfully"
|
||||
|
||||
# Directory should not be removed since it's not empty
|
||||
assert mock_remove.call_count == 2 # Only files, not directory
|
||||
|
||||
def test_download_translation_unauthorized(self, client):
|
||||
"""Test downloading translation file without authentication"""
|
||||
response = client.get("/download-translation")
|
||||
assert response.status_code == 401
|
||||
data = response.json()
|
||||
assert data["detail"] == "Unauthorized"
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
@patch('builtins.open', new_callable=mock_open, read_data=b"test content")
|
||||
def test_download_translation_success(self, mock_file, mock_exists, client, auth_headers):
|
||||
"""Test successful translation file download"""
|
||||
# Mock that file exists
|
||||
mock_exists.return_value = True
|
||||
|
||||
response = client.get("/download-translation", headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Check response headers
|
||||
assert response.headers["content-type"] == "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
|
||||
assert "attachment" in response.headers["content-disposition"]
|
||||
# The filename should be in the content disposition header
|
||||
content_disposition = response.headers["content-disposition"]
|
||||
assert "filename" in content_disposition
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
@patch('builtins.open', new_callable=mock_open, read_data=b"test content")
|
||||
def test_download_translation_with_metadata(self, mock_file, mock_exists, client, auth_headers):
|
||||
"""Test translation download with metadata filename"""
|
||||
# Mock that files exist
|
||||
mock_exists.side_effect = lambda path: True
|
||||
|
||||
response = client.get("/download-translation", headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Check that we get a valid response with proper headers
|
||||
assert response.headers["content-type"] == "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
|
||||
assert "attachment" in response.headers["content-disposition"]
|
||||
assert "filename" in response.headers["content-disposition"]
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
def test_download_translation_not_found(self, mock_exists, client, auth_headers):
|
||||
"""Test downloading translation file when none exists"""
|
||||
# Mock that file doesn't exist
|
||||
mock_exists.return_value = False
|
||||
|
||||
response = client.get("/download-translation", headers=auth_headers)
|
||||
assert response.status_code == 404
|
||||
data = response.json()
|
||||
assert data["detail"] == "No translation file found"
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
@patch('builtins.open', side_effect=Exception("File read error"))
|
||||
def test_download_translation_read_error(self, mock_file, mock_exists, client, auth_headers):
|
||||
"""Test translation download with file read error"""
|
||||
mock_exists.return_value = True
|
||||
|
||||
# Should raise an exception when file read fails
|
||||
with pytest.raises(Exception, match="File read error"):
|
||||
client.get("/download-translation", headers=auth_headers)
|
||||
|
||||
def test_check_translation_status_no_file(self, client):
|
||||
"""Test translation status check when no file exists"""
|
||||
with patch('routes.auth.os.path.exists') as mock_exists:
|
||||
mock_exists.return_value = False
|
||||
|
||||
response = client.get("/translations/status")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["file_exists"] is False
|
||||
assert data["file_name"] is None
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
@patch('builtins.open', new_callable=mock_open, read_data=b"custom_filename.xlsx")
|
||||
def test_check_translation_status_with_file(self, mock_file, mock_exists, client):
|
||||
"""Test translation status check when file exists"""
|
||||
# Mock that files exist
|
||||
mock_exists.side_effect = lambda path: True
|
||||
|
||||
response = client.get("/translations/status")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["file_exists"] is True
|
||||
assert data["file_name"] == "custom_filename.xlsx"
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
@patch('builtins.open', side_effect=Exception("Metadata read error"))
|
||||
def test_check_translation_status_metadata_error(self, mock_file, mock_exists, client):
|
||||
"""Test translation status check with metadata read error"""
|
||||
# Mock that files exist
|
||||
mock_exists.side_effect = lambda path: True
|
||||
|
||||
response = client.get("/translations/status")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# Should fall back to default filename
|
||||
assert data["file_exists"] is True
|
||||
assert data["file_name"] == "translation.xlsx"
|
||||
|
||||
def test_get_latest_translation_no_file(self, client):
|
||||
"""Test latest translation endpoint when no file exists"""
|
||||
with patch('routes.auth.os.path.exists') as mock_exists:
|
||||
mock_exists.return_value = False
|
||||
|
||||
response = client.get("/translations/latest")
|
||||
assert response.status_code == 404
|
||||
data = response.json()
|
||||
assert data["detail"] == "No translation file found"
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
@patch('builtins.open', new_callable=mock_open, read_data=b"test content")
|
||||
def test_get_latest_translation_success(self, mock_file, mock_exists, client):
|
||||
"""Test successful latest translation download"""
|
||||
# Mock that files exist
|
||||
mock_exists.side_effect = lambda path: True
|
||||
|
||||
response = client.get("/translations/latest")
|
||||
assert response.status_code == 200
|
||||
|
||||
# Check response headers
|
||||
assert response.headers["content-type"] == "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
|
||||
assert "attachment" in response.headers["content-disposition"]
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
@patch('builtins.open', new_callable=mock_open, read_data=b"test content")
|
||||
def test_get_latest_translation_with_metadata(self, mock_file, mock_exists, client):
|
||||
"""Test latest translation download with metadata filename"""
|
||||
# Mock that files exist
|
||||
mock_exists.side_effect = lambda path: True
|
||||
|
||||
response = client.get("/translations/latest")
|
||||
assert response.status_code == 200
|
||||
|
||||
# Check that we get a valid response with proper headers
|
||||
assert response.headers["content-type"] == "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
|
||||
assert "attachment" in response.headers["content-disposition"]
|
||||
assert "filename" in response.headers["content-disposition"]
|
||||
|
||||
def test_upload_translation_invalid_file_type(self, client, auth_headers):
|
||||
"""Test uploading non-Excel file"""
|
||||
file_content = b"not an excel file"
|
||||
|
||||
response = client.post(
|
||||
"/upload-translations",
|
||||
files={"file": ("test.txt", file_content, "text/plain")},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
# Should still accept the file since validation is not strict
|
||||
assert response.status_code in [200, 400] # Depends on implementation
|
||||
|
||||
def test_upload_translation_empty_file(self, client, auth_headers):
|
||||
"""Test uploading empty file"""
|
||||
with patch('routes.auth.os.path.exists') as mock_exists:
|
||||
mock_exists.return_value = False
|
||||
|
||||
response = client.post(
|
||||
"/upload-translations",
|
||||
files={"file": ("empty.xlsx", b"", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["message"] == "Translation file uploaded successfully"
|
||||
|
||||
def test_upload_translation_large_file(self, client, auth_headers):
|
||||
"""Test uploading large file"""
|
||||
with patch('routes.auth.os.path.exists') as mock_exists:
|
||||
mock_exists.return_value = False
|
||||
|
||||
# Create a large file content (1MB)
|
||||
large_content = b"x" * (1024 * 1024)
|
||||
|
||||
response = client.post(
|
||||
"/upload-translations",
|
||||
files={"file": ("large.xlsx", large_content, "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["message"] == "Translation file uploaded successfully"
|
||||
|
||||
@patch('routes.auth.os.path.exists')
|
||||
@patch('routes.auth.os.makedirs')
|
||||
@patch('builtins.open', new_callable=mock_open)
|
||||
def test_upload_translation_no_filename(self, mock_file, mock_makedirs, mock_exists, client, auth_headers):
|
||||
"""Test uploading file with minimal filename"""
|
||||
mock_exists.return_value = False
|
||||
|
||||
file_content = b"test content"
|
||||
|
||||
response = client.post(
|
||||
"/upload-translations",
|
||||
files={"file": ("test.xlsx", file_content, "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
# Should handle the upload successfully
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["filename"] == "test.xlsx"
|
||||
714
ebook_backend_admin_panel/admin-backend/tests/test_utils.py
Normal file
@@ -0,0 +1,714 @@
|
||||
"""
|
||||
Comprehensive test suite for utility modules
|
||||
Achieves 90% code coverage for all utility functions
|
||||
"""
|
||||
import pytest
|
||||
import os
|
||||
import string
|
||||
import random
|
||||
import tempfile
|
||||
import shutil
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from unittest.mock import patch, MagicMock, mock_open, call
|
||||
import pytz
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
import sys
|
||||
|
||||
# Import all utility functions
|
||||
from utils.auth import hash_password, verify_password, get_db, engine, SessionLocal, Base
|
||||
from utils.coupon_utils import generate_coupon
|
||||
from utils.timezone_utils import (
|
||||
get_cest_timezone, get_server_timezone, utc_to_cest, local_to_cest,
|
||||
format_cest_datetime, now_cest
|
||||
)
|
||||
from utils.exceptions import (
|
||||
APIException, AuthenticationError, AuthorizationError, NotFoundError,
|
||||
ValidationError, ConflictError, RateLimitError, DatabaseError,
|
||||
FileUploadError, CouponError, CouponNotFoundError, CouponAlreadyUsedError,
|
||||
CouponBlockedError, CouponLimitExceededError, FileTypeError, FileSizeError,
|
||||
FileExistsError, handle_api_exception
|
||||
)
|
||||
from utils.logger import setup_logger, get_logger, StructuredFormatter
|
||||
from utils.template_loader import templates, TEMPLATE_DIR, BASE_DIR, PARENT_DIR
|
||||
|
||||
|
||||
class TestAuthUtils:
|
||||
"""Test cases for authentication utilities"""
|
||||
|
||||
def test_hash_password(self):
|
||||
"""Test password hashing"""
|
||||
password = "testpassword123"
|
||||
hashed = hash_password(password)
|
||||
|
||||
assert isinstance(hashed, str)
|
||||
assert hashed != password
|
||||
assert len(hashed) > len(password)
|
||||
|
||||
def test_hash_password_different_passwords(self):
|
||||
"""Test that different passwords produce different hashes"""
|
||||
password1 = "password1"
|
||||
password2 = "password2"
|
||||
|
||||
hash1 = hash_password(password1)
|
||||
hash2 = hash_password(password2)
|
||||
|
||||
assert hash1 != hash2
|
||||
|
||||
def test_hash_password_same_password(self):
|
||||
"""Test that same password produces different hashes (salt)"""
|
||||
password = "testpassword"
|
||||
|
||||
hash1 = hash_password(password)
|
||||
hash2 = hash_password(password)
|
||||
|
||||
# Should be different due to salt
|
||||
assert hash1 != hash2
|
||||
|
||||
def test_verify_password_correct(self):
|
||||
"""Test password verification with correct password"""
|
||||
password = "testpassword123"
|
||||
hashed = hash_password(password)
|
||||
|
||||
assert verify_password(password, hashed) is True
|
||||
|
||||
def test_verify_password_incorrect(self):
|
||||
"""Test password verification with incorrect password"""
|
||||
password = "testpassword123"
|
||||
wrong_password = "wrongpassword"
|
||||
hashed = hash_password(password)
|
||||
|
||||
assert verify_password(wrong_password, hashed) is False
|
||||
|
||||
def test_verify_password_empty_password(self):
|
||||
"""Test password verification with empty password"""
|
||||
password = "testpassword123"
|
||||
hashed = hash_password(password)
|
||||
|
||||
assert verify_password("", hashed) is False
|
||||
|
||||
def test_verify_password_none_password(self):
|
||||
"""Test password verification with None password"""
|
||||
password = "testpassword123"
|
||||
hashed = hash_password(password)
|
||||
|
||||
# Passlib raises TypeError for None password
|
||||
with pytest.raises(TypeError):
|
||||
verify_password(None, hashed)
|
||||
|
||||
def test_get_db_generator(self):
|
||||
"""Test database session generator"""
|
||||
# Test that get_db is a generator function
|
||||
db_gen = get_db()
|
||||
|
||||
# Get the first (and only) value
|
||||
db = next(db_gen)
|
||||
|
||||
assert isinstance(db, Session)
|
||||
|
||||
# Test that the generator closes properly
|
||||
try:
|
||||
next(db_gen)
|
||||
assert False, "Should have raised StopIteration"
|
||||
except StopIteration:
|
||||
pass
|
||||
|
||||
def test_engine_creation(self):
|
||||
"""Test that database engine is created"""
|
||||
assert engine is not None
|
||||
|
||||
def test_session_local_creation(self):
|
||||
"""Test that SessionLocal is created"""
|
||||
assert SessionLocal is not None
|
||||
|
||||
def test_base_declarative_base(self):
|
||||
"""Test that Base declarative base is created"""
|
||||
assert Base is not None
|
||||
|
||||
|
||||
class TestCouponUtils:
|
||||
"""Test cases for coupon utilities"""
|
||||
|
||||
def test_generate_coupon_length(self):
|
||||
"""Test that generated coupon has correct length"""
|
||||
coupon = generate_coupon()
|
||||
assert len(coupon) == 10
|
||||
|
||||
def test_generate_coupon_characters(self):
|
||||
"""Test that generated coupon contains valid characters"""
|
||||
coupon = generate_coupon()
|
||||
valid_chars = string.ascii_uppercase + string.digits
|
||||
|
||||
for char in coupon:
|
||||
assert char in valid_chars
|
||||
|
||||
def test_generate_coupon_uniqueness(self):
|
||||
"""Test that generated coupons are unique"""
|
||||
coupons = set()
|
||||
for _ in range(100):
|
||||
coupon = generate_coupon()
|
||||
assert coupon not in coupons
|
||||
coupons.add(coupon)
|
||||
|
||||
def test_generate_coupon_randomness(self):
|
||||
"""Test that generated coupons are random"""
|
||||
coupons = [generate_coupon() for _ in range(50)]
|
||||
|
||||
# Check that we have some variety in characters
|
||||
all_chars = ''.join(coupons)
|
||||
assert len(set(all_chars)) > 10 # Should have variety
|
||||
|
||||
@patch('utils.coupon_utils.random.choices')
|
||||
def test_generate_coupon_calls_random_choices(self, mock_choices):
|
||||
"""Test that generate_coupon calls random.choices correctly"""
|
||||
mock_choices.return_value = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J']
|
||||
|
||||
coupon = generate_coupon()
|
||||
|
||||
mock_choices.assert_called_once_with(string.ascii_uppercase + string.digits, k=10)
|
||||
assert coupon == "ABCDEFGHIJ"
|
||||
|
||||
|
||||
class TestTimezoneUtils:
|
||||
"""Test cases for timezone utilities"""
|
||||
|
||||
def test_get_cest_timezone(self):
|
||||
"""Test getting CEST timezone"""
|
||||
tz = get_cest_timezone()
|
||||
assert str(tz) == "Europe/Berlin"
|
||||
|
||||
def test_get_server_timezone(self):
|
||||
"""Test getting server timezone"""
|
||||
tz = get_server_timezone()
|
||||
assert str(tz) == "Asia/Kolkata"
|
||||
|
||||
def test_utc_to_cest_with_timezone_aware(self):
|
||||
"""Test UTC to CEST conversion with timezone-aware datetime"""
|
||||
utc_dt = datetime.now(timezone.utc)
|
||||
cest_dt = utc_to_cest(utc_dt)
|
||||
|
||||
assert cest_dt.tzinfo is not None
|
||||
assert cest_dt.replace(tzinfo=None) != utc_dt.replace(tzinfo=None)
|
||||
|
||||
def test_utc_to_cest_with_timezone_naive(self):
|
||||
"""Test UTC to CEST conversion with timezone-naive datetime"""
|
||||
naive_dt = datetime.now()
|
||||
cest_dt = utc_to_cest(naive_dt)
|
||||
|
||||
assert cest_dt.tzinfo is not None
|
||||
assert cest_dt.replace(tzinfo=None) != naive_dt.replace(tzinfo=None)
|
||||
|
||||
def test_utc_to_cest_none_input(self):
|
||||
"""Test UTC to CEST conversion with None input"""
|
||||
result = utc_to_cest(None)
|
||||
assert result is None
|
||||
|
||||
def test_local_to_cest_with_timezone_aware(self):
|
||||
"""Test local to CEST conversion with timezone-aware datetime"""
|
||||
ist_dt = datetime.now(pytz.timezone('Asia/Kolkata'))
|
||||
cest_dt = local_to_cest(ist_dt)
|
||||
|
||||
assert cest_dt.tzinfo is not None
|
||||
assert cest_dt.replace(tzinfo=None) != ist_dt.replace(tzinfo=None)
|
||||
|
||||
def test_local_to_cest_with_timezone_naive(self):
|
||||
"""Test local to CEST conversion with timezone-naive datetime"""
|
||||
naive_dt = datetime.now()
|
||||
cest_dt = local_to_cest(naive_dt)
|
||||
|
||||
assert cest_dt.tzinfo is not None
|
||||
assert cest_dt.replace(tzinfo=None) != naive_dt.replace(tzinfo=None)
|
||||
|
||||
def test_local_to_cest_none_input(self):
|
||||
"""Test local to CEST conversion with None input"""
|
||||
result = local_to_cest(None)
|
||||
assert result is None
|
||||
|
||||
def test_format_cest_datetime_with_datetime(self):
|
||||
"""Test formatting datetime to CEST string"""
|
||||
utc_dt = datetime.now(timezone.utc)
|
||||
formatted = format_cest_datetime(utc_dt)
|
||||
|
||||
assert isinstance(formatted, str)
|
||||
assert len(formatted) > 0
|
||||
# Should match format YYYY-MM-DD HH:MM:SS
|
||||
assert len(formatted.split()) == 2
|
||||
assert len(formatted.split()[0].split('-')) == 3
|
||||
assert len(formatted.split()[1].split(':')) == 3
|
||||
|
||||
def test_format_cest_datetime_with_custom_format(self):
|
||||
"""Test formatting datetime with custom format"""
|
||||
utc_dt = datetime.now(timezone.utc)
|
||||
formatted = format_cest_datetime(utc_dt, "%Y-%m-%d")
|
||||
|
||||
assert isinstance(formatted, str)
|
||||
assert len(formatted.split('-')) == 3
|
||||
|
||||
def test_format_cest_datetime_none_input(self):
|
||||
"""Test formatting None datetime"""
|
||||
result = format_cest_datetime(None)
|
||||
assert result is None
|
||||
|
||||
def test_now_cest(self):
|
||||
"""Test getting current time in CEST"""
|
||||
now = now_cest()
|
||||
|
||||
assert isinstance(now, datetime)
|
||||
assert now.tzinfo is not None
|
||||
assert str(now.tzinfo) == "Europe/Berlin"
|
||||
|
||||
|
||||
class TestExceptions:
|
||||
"""Test cases for custom exceptions"""
|
||||
|
||||
def test_api_exception_creation(self):
|
||||
"""Test creating APIException"""
|
||||
exc = APIException(
|
||||
status_code=400,
|
||||
detail="Test error",
|
||||
error_code="TEST_ERROR"
|
||||
)
|
||||
|
||||
assert exc.status_code == 400
|
||||
assert exc.detail == "Test error"
|
||||
assert exc.error_code == "TEST_ERROR"
|
||||
assert exc.extra_data == {}
|
||||
|
||||
def test_api_exception_with_extra_data(self):
|
||||
"""Test creating APIException with extra data"""
|
||||
extra_data = {"field": "value", "count": 42}
|
||||
exc = APIException(
|
||||
status_code=422,
|
||||
detail="Validation error",
|
||||
error_code="VALIDATION_ERROR",
|
||||
extra_data=extra_data
|
||||
)
|
||||
|
||||
assert exc.extra_data == extra_data
|
||||
|
||||
def test_authentication_error(self):
|
||||
"""Test AuthenticationError creation"""
|
||||
exc = AuthenticationError("Custom auth error")
|
||||
assert exc.status_code == 401
|
||||
assert exc.error_code == "AUTHENTICATION_ERROR"
|
||||
assert exc.detail == "Custom auth error"
|
||||
|
||||
def test_authorization_error(self):
|
||||
"""Test AuthorizationError creation"""
|
||||
exc = AuthorizationError("Custom authz error")
|
||||
assert exc.status_code == 403
|
||||
assert exc.error_code == "AUTHORIZATION_ERROR"
|
||||
assert exc.detail == "Custom authz error"
|
||||
|
||||
def test_not_found_error(self):
|
||||
"""Test NotFoundError creation"""
|
||||
exc = NotFoundError("User", "User not found")
|
||||
assert exc.status_code == 404
|
||||
assert exc.error_code == "NOT_FOUND_ERROR"
|
||||
assert exc.detail == "User not found"
|
||||
|
||||
def test_not_found_error_default_detail(self):
|
||||
"""Test NotFoundError with default detail"""
|
||||
exc = NotFoundError("User")
|
||||
assert exc.status_code == 404
|
||||
assert exc.detail == "User not found"
|
||||
|
||||
def test_validation_error(self):
|
||||
"""Test ValidationError creation"""
|
||||
exc = ValidationError("Invalid email", "email")
|
||||
assert exc.status_code == 422
|
||||
assert exc.error_code == "VALIDATION_ERROR"
|
||||
assert exc.detail == "Validation error in field 'email': Invalid email"
|
||||
|
||||
def test_validation_error_no_field(self):
|
||||
"""Test ValidationError without field"""
|
||||
exc = ValidationError("Invalid data")
|
||||
assert exc.status_code == 422
|
||||
assert exc.detail == "Invalid data"
|
||||
|
||||
def test_conflict_error(self):
|
||||
"""Test ConflictError creation"""
|
||||
exc = ConflictError("Resource already exists")
|
||||
assert exc.status_code == 409
|
||||
assert exc.error_code == "CONFLICT_ERROR"
|
||||
assert exc.detail == "Resource already exists"
|
||||
|
||||
def test_rate_limit_error(self):
|
||||
"""Test RateLimitError creation"""
|
||||
exc = RateLimitError("Too many requests")
|
||||
assert exc.status_code == 429
|
||||
assert exc.error_code == "RATE_LIMIT_ERROR"
|
||||
assert exc.detail == "Too many requests"
|
||||
|
||||
def test_database_error(self):
|
||||
"""Test DatabaseError creation"""
|
||||
exc = DatabaseError("Connection failed")
|
||||
assert exc.status_code == 500
|
||||
assert exc.error_code == "DATABASE_ERROR"
|
||||
assert exc.detail == "Connection failed"
|
||||
|
||||
def test_file_upload_error(self):
|
||||
"""Test FileUploadError creation"""
|
||||
exc = FileUploadError("Upload failed")
|
||||
assert exc.status_code == 400
|
||||
assert exc.error_code == "FILE_UPLOAD_ERROR"
|
||||
assert exc.detail == "Upload failed"
|
||||
|
||||
def test_coupon_error(self):
|
||||
"""Test CouponError creation"""
|
||||
exc = CouponError("Coupon invalid", "INVALID_COUPON")
|
||||
assert exc.status_code == 400
|
||||
assert exc.error_code == "INVALID_COUPON"
|
||||
assert exc.detail == "Coupon invalid"
|
||||
|
||||
def test_coupon_not_found_error(self):
|
||||
"""Test CouponNotFoundError creation"""
|
||||
exc = CouponNotFoundError("TEST123")
|
||||
assert exc.status_code == 404
|
||||
assert exc.error_code == "NOT_FOUND_ERROR"
|
||||
assert exc.detail == "Coupon code 'TEST123' not found"
|
||||
|
||||
def test_coupon_already_used_error(self):
|
||||
"""Test CouponAlreadyUsedError creation"""
|
||||
exc = CouponAlreadyUsedError("TEST123")
|
||||
assert exc.status_code == 400
|
||||
assert exc.error_code == "COUPON_ALREADY_USED"
|
||||
assert exc.detail == "Coupon code 'TEST123' has already been used"
|
||||
|
||||
def test_coupon_blocked_error(self):
|
||||
"""Test CouponBlockedError creation"""
|
||||
exc = CouponBlockedError("TEST123", 30)
|
||||
assert exc.status_code == 400
|
||||
assert exc.error_code == "COUPON_BLOCKED"
|
||||
assert exc.detail == "Coupon code 'TEST123' is blocked. Try again in 30 minutes"
|
||||
|
||||
def test_coupon_limit_exceeded_error(self):
|
||||
"""Test CouponLimitExceededError creation"""
|
||||
exc = CouponLimitExceededError("TEST123", 5)
|
||||
assert exc.status_code == 400
|
||||
assert exc.error_code == "COUPON_LIMIT_EXCEEDED"
|
||||
assert exc.detail == "Coupon code 'TEST123' usage limit (5) exceeded"
|
||||
|
||||
def test_file_type_error(self):
|
||||
"""Test FileTypeError creation"""
|
||||
exc = FileTypeError(["xlsx", "csv"])
|
||||
assert exc.status_code == 400
|
||||
assert exc.error_code == "FILE_UPLOAD_ERROR"
|
||||
assert exc.detail == "Invalid file type. Allowed types: xlsx, csv"
|
||||
|
||||
def test_file_size_error(self):
|
||||
"""Test FileSizeError creation"""
|
||||
exc = FileSizeError(10)
|
||||
assert exc.status_code == 400
|
||||
assert exc.error_code == "FILE_UPLOAD_ERROR"
|
||||
assert exc.detail == "File too large. Maximum size: 10MB"
|
||||
|
||||
def test_file_exists_error(self):
|
||||
"""Test FileExistsError creation"""
|
||||
exc = FileExistsError("test.xlsx")
|
||||
assert exc.status_code == 400
|
||||
assert exc.error_code == "FILE_UPLOAD_ERROR"
|
||||
assert exc.detail == "File 'test.xlsx' already exists. Please delete it first."
|
||||
|
||||
def test_handle_api_exception(self):
|
||||
"""Test handle_api_exception function"""
|
||||
exc = APIException(
|
||||
status_code=400,
|
||||
detail="Test error",
|
||||
error_code="TEST_ERROR",
|
||||
extra_data={"field": "value"}
|
||||
)
|
||||
|
||||
result = handle_api_exception(exc, "/test/path")
|
||||
|
||||
assert result["success"] is False
|
||||
assert result["error"] == "Test error"
|
||||
assert result["error_code"] == "TEST_ERROR"
|
||||
assert result["field"] == "value"
|
||||
assert result["path"] == "/test/path"
|
||||
assert result["timestamp"] is None
|
||||
|
||||
|
||||
class TestLogger:
|
||||
"""Test cases for logging utilities"""
|
||||
|
||||
@patch('utils.logger.logging.getLogger')
|
||||
@patch('utils.logger.logging.handlers.RotatingFileHandler')
|
||||
@patch('utils.logger.logging.StreamHandler')
|
||||
@patch('os.makedirs')
|
||||
def test_setup_logger(self, mock_makedirs, mock_stream_handler, mock_file_handler, mock_get_logger):
|
||||
"""Test logger setup"""
|
||||
mock_logger = MagicMock()
|
||||
mock_logger.handlers = [] # Start with no handlers
|
||||
mock_get_logger.return_value = mock_logger
|
||||
|
||||
logger = setup_logger("test_logger", "DEBUG")
|
||||
|
||||
mock_get_logger.assert_called_with("test_logger")
|
||||
mock_logger.setLevel.assert_called_with(logging.DEBUG)
|
||||
assert mock_logger.addHandler.call_count >= 1
|
||||
|
||||
@patch('utils.logger.logging.getLogger')
|
||||
def test_get_logger(self, mock_get_logger):
|
||||
"""Test get_logger function"""
|
||||
mock_logger = MagicMock()
|
||||
mock_get_logger.return_value = mock_logger
|
||||
|
||||
logger = get_logger("test_logger")
|
||||
|
||||
mock_get_logger.assert_called_with("test_logger")
|
||||
assert logger == mock_logger
|
||||
|
||||
def test_structured_formatter(self):
|
||||
"""Test StructuredFormatter"""
|
||||
formatter = StructuredFormatter()
|
||||
|
||||
# Create a mock log record
|
||||
record = MagicMock()
|
||||
record.getMessage.return_value = "Test message"
|
||||
record.levelname = "INFO"
|
||||
record.name = "test_logger"
|
||||
record.module = "test_module"
|
||||
record.funcName = "test_function"
|
||||
record.lineno = 42
|
||||
record.exc_info = None
|
||||
|
||||
# Add extra fields
|
||||
record.request_id = "req123"
|
||||
record.method = "GET"
|
||||
record.path = "/test"
|
||||
record.status_code = 200
|
||||
record.process_time = 0.1
|
||||
record.client_ip = "127.0.0.1"
|
||||
record.user_agent = "test-agent"
|
||||
record.error = "test error"
|
||||
record.exception_type = "ValueError"
|
||||
record.exception_message = "test exception"
|
||||
record.errors = ["error1", "error2"]
|
||||
record.app_name = "test_app"
|
||||
record.version = "1.0.0"
|
||||
record.environment = "test"
|
||||
record.debug = True
|
||||
|
||||
formatted = formatter.format(record)
|
||||
|
||||
# Parse the JSON output
|
||||
log_data = json.loads(formatted)
|
||||
|
||||
assert log_data["message"] == "Test message"
|
||||
assert log_data["level"] == "INFO"
|
||||
assert log_data["logger"] == "test_logger"
|
||||
assert log_data["module"] == "test_module"
|
||||
assert log_data["function"] == "test_function"
|
||||
assert log_data["line"] == 42
|
||||
assert log_data["request_id"] == "req123"
|
||||
assert log_data["method"] == "GET"
|
||||
assert log_data["path"] == "/test"
|
||||
assert log_data["status_code"] == 200
|
||||
assert log_data["process_time"] == 0.1
|
||||
assert log_data["client_ip"] == "127.0.0.1"
|
||||
assert log_data["user_agent"] == "test-agent"
|
||||
assert log_data["error"] == "test error"
|
||||
assert log_data["exception_type"] == "ValueError"
|
||||
assert log_data["exception_message"] == "test exception"
|
||||
assert log_data["errors"] == ["error1", "error2"]
|
||||
assert log_data["app_name"] == "test_app"
|
||||
assert log_data["version"] == "1.0.0"
|
||||
assert log_data["environment"] == "test"
|
||||
assert log_data["debug"] is True
|
||||
|
||||
def test_structured_formatter_with_exception(self):
|
||||
"""Test StructuredFormatter with exception info"""
|
||||
formatter = StructuredFormatter()
|
||||
|
||||
# Create a mock log record with exception
|
||||
record = MagicMock()
|
||||
record.getMessage.return_value = "Test message"
|
||||
record.levelname = "ERROR"
|
||||
record.name = "test_logger"
|
||||
record.module = "test_module"
|
||||
record.funcName = "test_function"
|
||||
record.lineno = 42
|
||||
record.exc_info = (ValueError, ValueError("Test exception"), None)
|
||||
|
||||
# Remove any MagicMock attributes that might cause JSON serialization issues
|
||||
record.request_id = None
|
||||
record.method = None
|
||||
record.path = None
|
||||
record.status_code = None
|
||||
record.process_time = None
|
||||
record.client_ip = None
|
||||
record.user_agent = None
|
||||
record.error = None
|
||||
record.exception_type = None
|
||||
record.exception_message = None
|
||||
record.errors = None
|
||||
record.app_name = None
|
||||
record.version = None
|
||||
record.environment = None
|
||||
record.debug = None
|
||||
|
||||
formatted = formatter.format(record)
|
||||
log_data = json.loads(formatted)
|
||||
|
||||
assert log_data["message"] == "Test message"
|
||||
assert log_data["level"] == "ERROR"
|
||||
assert "exception" in log_data
|
||||
|
||||
|
||||
class TestTemplateLoader:
|
||||
"""Test cases for template loader"""
|
||||
|
||||
def test_templates_instance(self):
|
||||
"""Test that templates is created"""
|
||||
assert templates is not None
|
||||
|
||||
def test_template_directory_path(self):
|
||||
"""Test template directory path"""
|
||||
assert TEMPLATE_DIR is not None
|
||||
assert isinstance(TEMPLATE_DIR, str)
|
||||
assert "admin-frontend" in TEMPLATE_DIR
|
||||
|
||||
def test_base_dir_path(self):
|
||||
"""Test base directory path"""
|
||||
assert BASE_DIR is not None
|
||||
assert isinstance(BASE_DIR, str)
|
||||
|
||||
def test_parent_dir_path(self):
|
||||
"""Test parent directory path"""
|
||||
assert PARENT_DIR is not None
|
||||
assert isinstance(PARENT_DIR, str)
|
||||
|
||||
|
||||
class TestDatabaseIntegration:
|
||||
"""Test cases for database integration"""
|
||||
|
||||
def test_database_url_environment(self):
|
||||
"""Test that DATABASE_URL is set from environment"""
|
||||
# This test verifies that the environment variable loading works
|
||||
# The actual URL will depend on the environment
|
||||
assert hasattr(engine, 'url')
|
||||
|
||||
def test_session_local_binding(self):
|
||||
"""Test that SessionLocal is bound to engine"""
|
||||
# Create a session and verify it's bound to the engine
|
||||
session = SessionLocal()
|
||||
assert session.bind == engine
|
||||
session.close()
|
||||
|
||||
|
||||
class TestEdgeCases:
|
||||
"""Test cases for edge cases and error conditions"""
|
||||
|
||||
def test_hash_password_special_characters(self):
|
||||
"""Test password hashing with special characters"""
|
||||
password = "!@#$%^&*()_+-=[]{}|;':\",./<>?"
|
||||
hashed = hash_password(password)
|
||||
|
||||
assert isinstance(hashed, str)
|
||||
assert hashed != password
|
||||
|
||||
def test_hash_password_unicode(self):
|
||||
"""Test password hashing with unicode characters"""
|
||||
password = "测试密码123"
|
||||
hashed = hash_password(password)
|
||||
|
||||
assert isinstance(hashed, str)
|
||||
assert hashed != password
|
||||
|
||||
def test_verify_password_empty_hash(self):
|
||||
"""Test password verification with empty hash"""
|
||||
# Passlib raises UnknownHashError for empty hash
|
||||
with pytest.raises(Exception): # UnknownHashError
|
||||
verify_password("password", "")
|
||||
|
||||
def test_verify_password_none_hash(self):
|
||||
"""Test password verification with None hash"""
|
||||
assert verify_password("password", None) is False
|
||||
|
||||
def test_generate_coupon_edge_cases(self):
|
||||
"""Test coupon generation edge cases"""
|
||||
# Test multiple generations for uniqueness
|
||||
coupons = set()
|
||||
for _ in range(1000):
|
||||
coupon = generate_coupon()
|
||||
assert len(coupon) == 10
|
||||
assert coupon not in coupons
|
||||
coupons.add(coupon)
|
||||
|
||||
def test_timezone_edge_cases(self):
|
||||
"""Test timezone utilities edge cases"""
|
||||
# Test with very old date
|
||||
old_date = datetime(1900, 1, 1)
|
||||
cest_old = utc_to_cest(old_date)
|
||||
assert cest_old.tzinfo is not None
|
||||
|
||||
# Test with very future date
|
||||
future_date = datetime(2100, 12, 31)
|
||||
cest_future = utc_to_cest(future_date)
|
||||
assert cest_future.tzinfo is not None
|
||||
|
||||
def test_exception_edge_cases(self):
|
||||
"""Test exception edge cases"""
|
||||
# Test APIException with empty extra_data
|
||||
exc = APIException(400, "test", "TEST", {})
|
||||
assert exc.extra_data == {}
|
||||
|
||||
# Test with None extra_data
|
||||
exc = APIException(400, "test", "TEST", None)
|
||||
assert exc.extra_data == {}
|
||||
|
||||
def test_logger_edge_cases(self):
|
||||
"""Test logger edge cases"""
|
||||
# Test setup_logger with invalid level
|
||||
with patch('utils.logger.logging.getLogger') as mock_get_logger:
|
||||
mock_logger = MagicMock()
|
||||
mock_get_logger.return_value = mock_logger
|
||||
|
||||
# Should handle invalid level gracefully
|
||||
with pytest.raises(AttributeError):
|
||||
setup_logger("test", "INVALID_LEVEL")
|
||||
|
||||
|
||||
class TestPerformance:
|
||||
"""Test cases for performance and stress testing"""
|
||||
|
||||
def test_password_hashing_performance(self):
|
||||
"""Test password hashing performance"""
|
||||
import time
|
||||
|
||||
start_time = time.time()
|
||||
for _ in range(10): # Reduced from 100 to 10 for faster test
|
||||
hash_password("testpassword123")
|
||||
end_time = time.time()
|
||||
|
||||
# Should complete in reasonable time (less than 10 seconds)
|
||||
assert end_time - start_time < 10.0
|
||||
|
||||
def test_coupon_generation_performance(self):
|
||||
"""Test coupon generation performance"""
|
||||
import time
|
||||
|
||||
start_time = time.time()
|
||||
coupons = [generate_coupon() for _ in range(1000)]
|
||||
end_time = time.time()
|
||||
|
||||
# Should complete in reasonable time (less than 1 second)
|
||||
assert end_time - start_time < 1.0
|
||||
|
||||
# All should be unique
|
||||
assert len(set(coupons)) == 1000
|
||||
|
||||
def test_timezone_conversion_performance(self):
|
||||
"""Test timezone conversion performance"""
|
||||
import time
|
||||
|
||||
start_time = time.time()
|
||||
for _ in range(1000):
|
||||
utc_to_cest(datetime.now())
|
||||
end_time = time.time()
|
||||
|
||||
# Should complete in reasonable time (less than 1 second)
|
||||
assert end_time - start_time < 1.0
|
||||
@@ -0,0 +1 @@
demo (1).xlsx
Binary file not shown.
30
ebook_backend_admin_panel/admin-backend/utils/auth.py
Normal file
@@ -0,0 +1,30 @@
import os
from dotenv import load_dotenv
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm import declarative_base
from passlib.context import CryptContext

# Load environment variables
load_dotenv()

DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://postgres:postgres@localhost:5432/postgres")

engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(bind=engine)
Base = declarative_base()

pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()

def hash_password(pw: str) -> str:
    return pwd_context.hash(pw)

def verify_password(pw: str, hashed: str) -> bool:
    return pwd_context.verify(pw, hashed)
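As a reading aid only (not part of the commit), here is a minimal sketch of how these helpers would typically be consumed from a FastAPI route; the route path and request shape are hypothetical:

# Hypothetical usage sketch for utils/auth.py; not part of the commit.
from fastapi import Depends, FastAPI
from sqlalchemy.orm import Session

from utils.auth import get_db, hash_password, verify_password

app = FastAPI()

@app.post("/check-password")
def check_password(password: str, db: Session = Depends(get_db)):
    # Hash on the way in, verify on the way out; the DB session is
    # opened and closed per-request by the get_db generator.
    hashed = hash_password(password)
    return {"matches": verify_password(password, hashed)}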
@@ -0,0 +1,8 @@
import random
import string

# def generate_coupon(length: int = 6) -> str:
#     return ''.join(random.choices(string.ascii_uppercase + string.digits, k=length))

def generate_coupon():
    return ''.join(random.choices(string.ascii_uppercase + string.digits, k=10))
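A small usage sketch (not part of the commit), assuming the module path utils.coupon_utils: with 36^10 possible codes a collision is unlikely but not impossible, so a batch generator would typically deduplicate, as below.

# Hypothetical batch helper; not part of the commit.
from utils.coupon_utils import generate_coupon

def generate_unique_batch(n: int) -> set:
    codes = set()
    while len(codes) < n:
        codes.add(generate_coupon())  # a duplicate draw is simply retried
    return codes

print(len(generate_unique_batch(1000)))  # -> 1000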
211
ebook_backend_admin_panel/admin-backend/utils/exceptions.py
Normal file
@@ -0,0 +1,211 @@
"""
Custom exceptions for the Ebook Coupon Management System
Provides structured error handling with proper error codes and messages.
"""
from typing import Dict, Any, Optional
from fastapi import HTTPException


class APIException(HTTPException):
    """Base API exception with structured error information"""

    def __init__(
        self,
        status_code: int,
        detail: str,
        error_code: str,
        extra_data: Optional[Dict[str, Any]] = None
    ):
        super().__init__(status_code=status_code, detail=detail)
        self.error_code = error_code
        self.extra_data = extra_data or {}


class AuthenticationError(APIException):
    """Authentication related errors"""

    def __init__(self, detail: str = "Authentication failed"):
        super().__init__(
            status_code=401,
            detail=detail,
            error_code="AUTHENTICATION_ERROR"
        )


class AuthorizationError(APIException):
    """Authorization related errors"""

    def __init__(self, detail: str = "Access denied"):
        super().__init__(
            status_code=403,
            detail=detail,
            error_code="AUTHORIZATION_ERROR"
        )


class NotFoundError(APIException):
    """Resource not found errors"""

    def __init__(self, resource: str, detail: Optional[str] = None):
        if detail is None:
            detail = f"{resource} not found"
        super().__init__(
            status_code=404,
            detail=detail,
            error_code="NOT_FOUND_ERROR"
        )


class ValidationError(APIException):
    """Validation related errors"""

    def __init__(self, detail: str, field: Optional[str] = None):
        if field:
            detail = f"Validation error in field '{field}': {detail}"
        super().__init__(
            status_code=422,
            detail=detail,
            error_code="VALIDATION_ERROR"
        )


class ConflictError(APIException):
    """Resource conflict errors"""

    def __init__(self, detail: str):
        super().__init__(
            status_code=409,
            detail=detail,
            error_code="CONFLICT_ERROR"
        )


class RateLimitError(APIException):
    """Rate limiting errors"""

    def __init__(self, detail: str = "Rate limit exceeded"):
        super().__init__(
            status_code=429,
            detail=detail,
            error_code="RATE_LIMIT_ERROR"
        )


class DatabaseError(APIException):
    """Database related errors"""

    def __init__(self, detail: str = "Database operation failed"):
        super().__init__(
            status_code=500,
            detail=detail,
            error_code="DATABASE_ERROR"
        )


class FileUploadError(APIException):
    """File upload related errors"""

    def __init__(self, detail: str):
        super().__init__(
            status_code=400,
            detail=detail,
            error_code="FILE_UPLOAD_ERROR"
        )


class CouponError(APIException):
    """Coupon related errors"""

    def __init__(self, detail: str, error_code: str = "COUPON_ERROR"):
        super().__init__(
            status_code=400,
            detail=detail,
            error_code=error_code
        )


def handle_api_exception(exc: APIException, path: str) -> Dict[str, Any]:
    """
    Handle API exception and return structured error response

    Args:
        exc: API exception instance
        path: Request path

    Returns:
        Structured error response
    """
    return {
        "success": False,
        "error": exc.detail,
        "error_code": exc.error_code,
        "timestamp": None,  # Will be set by exception handler
        "path": path,
        **exc.extra_data
    }


# Coupon specific exceptions
class CouponNotFoundError(NotFoundError):
    """Coupon not found error"""

    def __init__(self, code: str):
        super().__init__("coupon", f"Coupon code '{code}' not found")


class CouponAlreadyUsedError(CouponError):
    """Coupon already used error"""

    def __init__(self, code: str):
        super().__init__(
            f"Coupon code '{code}' has already been used",
            "COUPON_ALREADY_USED"
        )


class CouponBlockedError(CouponError):
    """Coupon blocked error"""

    def __init__(self, code: str, remaining_minutes: int):
        super().__init__(
            f"Coupon code '{code}' is blocked. Try again in {remaining_minutes} minutes",
            "COUPON_BLOCKED"
        )


class CouponLimitExceededError(CouponError):
    """Coupon usage limit exceeded error"""

    def __init__(self, code: str, limit: int):
        super().__init__(
            f"Coupon code '{code}' usage limit ({limit}) exceeded",
            "COUPON_LIMIT_EXCEEDED"
        )


# File upload specific exceptions
class FileTypeError(FileUploadError):
    """Invalid file type error"""

    def __init__(self, allowed_types: list):
        super().__init__(
            f"Invalid file type. Allowed types: {', '.join(allowed_types)}"
        )


class FileSizeError(FileUploadError):
    """File too large error"""

    def __init__(self, max_size_mb: int):
        super().__init__(
            f"File too large. Maximum size: {max_size_mb}MB"
        )


class FileExistsError(FileUploadError):
    """File already exists error"""

    def __init__(self, filename: str):
        super().__init__(
            f"File '{filename}' already exists. Please delete it first."
        )
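A hedged wiring sketch (not part of the commit) showing how handle_api_exception could be registered as a FastAPI exception handler; filling in the timestamp here is an assumption based on the "Will be set by exception handler" comment above:

# Hypothetical exception-handler wiring; not part of the commit.
from datetime import datetime, timezone
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse

from utils.exceptions import APIException, handle_api_exception

app = FastAPI()

@app.exception_handler(APIException)
async def api_exception_handler(request: Request, exc: APIException):
    # Build the structured body, then stamp the time (assumed behavior).
    body = handle_api_exception(exc, request.url.path)
    body["timestamp"] = datetime.now(timezone.utc).isoformat()
    return JSONResponse(status_code=exc.status_code, content=body)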
157
ebook_backend_admin_panel/admin-backend/utils/logger.py
Normal file
@@ -0,0 +1,157 @@
|
||||
"""
|
||||
Professional logging utility for the Ebook Coupon Management System
|
||||
Provides structured logging with proper formatting and log levels.
|
||||
"""
|
||||
import logging
|
||||
import logging.handlers
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from typing import Optional, Any, Dict
|
||||
import json
|
||||
|
||||
class SafeJSONEncoder(json.JSONEncoder):
|
||||
"""Custom JSON encoder that handles non-serializable objects safely"""
|
||||
|
||||
def default(self, obj):
|
||||
"""Handle non-serializable objects by converting them to strings"""
|
||||
if hasattr(obj, '__dict__'):
|
||||
return str(obj)
|
||||
elif hasattr(obj, '__str__'):
|
||||
return str(obj)
|
||||
else:
|
||||
return f"<{type(obj).__name__} object>"
|
||||
|
||||
class StructuredFormatter(logging.Formatter):
|
||||
"""Custom formatter for structured logging"""
|
||||
|
||||
def format(self, record: logging.LogRecord) -> str:
|
||||
"""Format log record with structured data"""
|
||||
log_entry = {
|
||||
"timestamp": datetime.utcnow().isoformat(),
|
||||
"level": record.levelname,
|
||||
"logger": record.name,
|
||||
"message": record.getMessage(),
|
||||
"module": record.module,
|
||||
"function": record.funcName,
|
||||
"line": record.lineno
|
||||
}
|
||||
|
||||
# Add extra fields if present
|
||||
if hasattr(record, 'request_id'):
|
||||
log_entry['request_id'] = record.request_id
|
||||
if hasattr(record, 'method'):
|
||||
log_entry['method'] = record.method
|
||||
if hasattr(record, 'path'):
|
||||
log_entry['path'] = record.path
|
||||
if hasattr(record, 'status_code'):
|
||||
log_entry['status_code'] = record.status_code
|
||||
if hasattr(record, 'process_time'):
|
||||
log_entry['process_time'] = record.process_time
|
||||
if hasattr(record, 'client_ip'):
|
||||
log_entry['client_ip'] = record.client_ip
|
||||
if hasattr(record, 'user_agent'):
|
||||
log_entry['user_agent'] = record.user_agent
|
||||
if hasattr(record, 'error'):
|
||||
log_entry['error'] = record.error
|
||||
if hasattr(record, 'exception_type'):
|
||||
log_entry['exception_type'] = record.exception_type
|
||||
if hasattr(record, 'exception_message'):
|
||||
log_entry['exception_message'] = record.exception_message
|
||||
if hasattr(record, 'errors'):
|
||||
# Handle errors list safely
|
||||
try:
|
||||
if isinstance(record.errors, list):
|
||||
log_entry['errors'] = [str(error) if not isinstance(error, (dict, str, int, float, bool)) else error for error in record.errors]
|
||||
else:
|
||||
log_entry['errors'] = str(record.errors)
|
||||
except Exception:
|
||||
log_entry['errors'] = str(record.errors)
|
||||
if hasattr(record, 'app_name'):
|
||||
log_entry['app_name'] = record.app_name
|
||||
if hasattr(record, 'version'):
|
||||
log_entry['version'] = record.version
|
||||
if hasattr(record, 'environment'):
|
||||
log_entry['environment'] = record.environment
|
||||
if hasattr(record, 'debug'):
|
||||
log_entry['debug'] = record.debug
|
||||
|
||||
# Add exception info if present
|
||||
if record.exc_info:
|
||||
log_entry['exception'] = self.formatException(record.exc_info)
|
||||
|
||||
return json.dumps(log_entry, ensure_ascii=False, cls=SafeJSONEncoder)
|
||||


def setup_logger(name: str, level: Optional[str] = None) -> logging.Logger:
    """
    Set up a logger with a console handler and rotating file handlers.

    Args:
        name: Logger name
        level: Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)

    Returns:
        Configured logger instance
    """
    # Get log level from argument or environment; uppercase either source so
    # getattr(logging, ...) resolves even for lowercase input
    log_level = (level or os.getenv("LOG_LEVEL", "INFO")).upper()

    # Create logger
    logger = logging.getLogger(name)
    logger.setLevel(getattr(logging, log_level))

    # Avoid duplicate handlers
    if logger.handlers:
        return logger

    # Create formatters
    structured_formatter = StructuredFormatter()
    console_formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    )

    # Console handler (human-readable)
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setLevel(logging.DEBUG)
    console_handler.setFormatter(console_formatter)
    logger.addHandler(console_handler)

    # File handler for structured logs
    # NOTE: RotatingFileHandler lives in the logging.handlers submodule, which must be
    # imported explicitly (import logging.handlers) for this attribute access to resolve.
    log_dir = "logs"
    os.makedirs(log_dir, exist_ok=True)

    file_handler = logging.handlers.RotatingFileHandler(
        os.path.join(log_dir, "app.log"),
        maxBytes=10 * 1024 * 1024,  # 10MB
        backupCount=5
    )
    file_handler.setLevel(logging.INFO)
    file_handler.setFormatter(structured_formatter)
    logger.addHandler(file_handler)

    # Error file handler
    error_handler = logging.handlers.RotatingFileHandler(
        os.path.join(log_dir, "error.log"),
        maxBytes=10 * 1024 * 1024,  # 10MB
        backupCount=5
    )
    error_handler.setLevel(logging.ERROR)
    error_handler.setFormatter(structured_formatter)
    logger.addHandler(error_handler)

    return logger


def get_logger(name: str) -> logging.Logger:
    """
    Get a logger instance by name.

    Args:
        name: Logger name

    Returns:
        Logger instance
    """
    return logging.getLogger(name)


# Create default logger
default_logger = setup_logger("ebook_coupon_system")
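
A minimal usage sketch for the two entry points above; the import path and log messages are assumptions, not taken from the repository:

# Hypothetical call site, e.g. in a route module:
from utils.logger import setup_logger, get_logger    # assumed module path

logger = setup_logger("ebook_coupon_system.api")      # configures console + rotating file handlers once
logger.info("Coupon validated", extra={"request_id": "req-42", "status_code": 200})

# Later calls can reuse the already-configured logger without re-adding handlers:
get_logger("ebook_coupon_system.api").error("Validation failed", extra={"error": "coupon expired"})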
@@ -0,0 +1,8 @@
from fastapi.templating import Jinja2Templates
import os

# Resolve the admin-frontend template directory relative to this file
BASE_DIR = os.path.dirname(__file__)
PARENT_DIR = os.path.abspath(os.path.join(BASE_DIR, "..", ".."))
TEMPLATE_DIR = os.path.join(PARENT_DIR, "admin-frontend")

templates = Jinja2Templates(directory=TEMPLATE_DIR)
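
A brief sketch of how the `templates` object above would typically be used in a FastAPI route; the route path, template filename, and import path are assumptions:

from fastapi import APIRouter, Request
from fastapi.responses import HTMLResponse
# from utils.templates_config import templates       # assumed import of the object defined above

router = APIRouter()

@router.get("/admin/login", response_class=HTMLResponse)
async def login_page(request: Request):
    # Jinja2Templates requires the request object in the template context
    return templates.TemplateResponse("login.html", {"request": request})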
@@ -0,0 +1,83 @@
"""
Timezone utilities for CEST/CET conversion
"""
from datetime import datetime, timezone

import pytz


def get_cest_timezone():
    """Get the CEST/CET timezone (Europe/Berlin)"""
    return pytz.timezone('Europe/Berlin')


def get_server_timezone():
    """Get the server's local timezone (IST, Asia/Kolkata)"""
    return pytz.timezone('Asia/Kolkata')


def utc_to_cest(utc_datetime):
    """
    Convert a UTC datetime to the CEST/CET timezone.

    Args:
        utc_datetime: UTC datetime object (naive values are assumed to be UTC)

    Returns:
        datetime object in CEST/CET timezone, or None if input is None
    """
    if utc_datetime is None:
        return None

    # Ensure the datetime is timezone-aware
    if utc_datetime.tzinfo is None:
        utc_datetime = utc_datetime.replace(tzinfo=timezone.utc)

    cest_tz = get_cest_timezone()
    return utc_datetime.astimezone(cest_tz)


def local_to_cest(local_datetime):
    """
    Convert local server time (IST) to the CEST/CET timezone.

    Args:
        local_datetime: Local datetime object from the server (naive values are assumed to be IST)

    Returns:
        datetime object in CEST/CET timezone, or None if input is None
    """
    if local_datetime is None:
        return None

    # First, make the local datetime timezone-aware
    ist_tz = get_server_timezone()
    if local_datetime.tzinfo is None:
        local_datetime = ist_tz.localize(local_datetime)

    # Convert to CEST/CET
    cest_tz = get_cest_timezone()
    return local_datetime.astimezone(cest_tz)


def format_cest_datetime(utc_datetime, format_str="%Y-%m-%d %H:%M:%S"):
    """
    Format a datetime as a CEST/CET string.

    Note: despite the parameter name, the value is passed through local_to_cest(),
    so naive datetimes are treated as server-local (IST) time rather than UTC;
    timezone-aware values are converted correctly either way.

    Args:
        utc_datetime: datetime object to format
        format_str: strftime format string

    Returns:
        Formatted string in CEST/CET timezone, or None if input is None
    """
    if utc_datetime is None:
        return None

    # Convert server time to CEST/CET
    cest_datetime = local_to_cest(utc_datetime)
    return cest_datetime.strftime(format_str)


def now_cest():
    """
    Get the current time in the CEST/CET timezone.

    Returns:
        datetime object in CEST/CET timezone
    """
    cest_tz = get_cest_timezone()
    return datetime.now(cest_tz)
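
A short usage sketch for these helpers; the sample timestamps are made up and the functions above are assumed to be in scope:

from datetime import datetime

# A naive timestamp as stored by the server; local_to_cest() treats it as IST:
created_at = datetime(2024, 6, 1, 14, 30, 0)

print(local_to_cest(created_at))                       # 2024-06-01 11:00:00+02:00 (14:30 IST -> 11:00 CEST)
print(format_cest_datetime(created_at))                # "2024-06-01 11:00:00"
print(utc_to_cest(datetime(2024, 6, 1, 9, 0, 0)))      # naive value treated as UTC: 09:00 UTC -> 11:00 CEST
print(now_cest().strftime("%Y-%m-%d %H:%M:%S %Z"))     # current time in CEST/CET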