"""
|
|
Storage service for file uploads.
|
|
Supports both local filesystem and S3-compatible storage (FirstVDS).
|
|
"""
|
|
import logging
|
|
import uuid
|
|
from pathlib import Path
|
|
from typing import Literal
|
|
|
|
import boto3
|
|
from botocore.exceptions import ClientError, BotoCoreError
|
|
from botocore.config import Config
|
|
|
|
from app.core.config import settings
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
StorageFolder = Literal["avatars", "covers", "proofs"]
|
|
|
|
|
|
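
# Configuration this module reads from `settings` (names taken from the code
# below; the example values are illustrative only, not the project's defaults):
#
#     S3_ENABLED=true
#     S3_ENDPOINT_URL=https://s3.example.com      # any S3-compatible endpoint (assumption)
#     S3_ACCESS_KEY_ID=...
#     S3_SECRET_ACCESS_KEY=...
#     S3_REGION=us-east-1
#     S3_BUCKET_NAME=my-bucket
#     S3_PUBLIC_URL=https://s3.example.com/my-bucket
#     UPLOAD_DIR=uploads                          # used only for local storage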


class StorageService:
    """Unified storage service with S3 and local filesystem support."""

    def __init__(self):
        self._s3_client = None

    @property
    def s3_client(self):
        """Lazy initialization of S3 client."""
        if self._s3_client is None and settings.S3_ENABLED:
            logger.info(f"Initializing S3 client: endpoint={settings.S3_ENDPOINT_URL}, bucket={settings.S3_BUCKET_NAME}")
            try:
                # Use signature_version=s3v4 for S3-compatible storage
                self._s3_client = boto3.client(
                    "s3",
                    endpoint_url=settings.S3_ENDPOINT_URL,
                    aws_access_key_id=settings.S3_ACCESS_KEY_ID,
                    aws_secret_access_key=settings.S3_SECRET_ACCESS_KEY,
                    region_name=settings.S3_REGION or "us-east-1",
                    config=Config(signature_version="s3v4"),
                )
                logger.info("S3 client initialized successfully")
            except Exception as e:
                logger.error(f"Failed to initialize S3 client: {e}")
                self._s3_client = None
        return self._s3_client

    def generate_filename(self, prefix: str | int, original_filename: str | None) -> str:
        """Generate unique filename with prefix."""
        ext = "jpg"
        if original_filename and "." in original_filename:
            ext = original_filename.rsplit(".", 1)[-1].lower()
        return f"{prefix}_{uuid.uuid4().hex}.{ext}"

    async def upload_file(
        self,
        content: bytes,
        folder: StorageFolder,
        filename: str,
        content_type: str = "application/octet-stream",
    ) -> str:
        """
        Upload file to storage.

        Returns:
            Path/key to the uploaded file (relative path for local, S3 key for S3)
        """
        if settings.S3_ENABLED:
            try:
                return await self._upload_to_s3(content, folder, filename, content_type)
            except Exception as e:
                logger.error(f"S3 upload failed, falling back to local: {e}")
                return await self._upload_to_local(content, folder, filename)
        else:
            return await self._upload_to_local(content, folder, filename)
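
    # Illustrative return values for upload_file (the filename shape comes from
    # generate_filename; "uploads" assumes UPLOAD_DIR="uploads"):
    #   S3 enabled:   "avatars/42_<uuid hex>.jpg"          (S3 object key)
    #   S3 disabled:  "uploads/avatars/42_<uuid hex>.jpg"  (local path under UPLOAD_DIR)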

    async def _upload_to_s3(
        self,
        content: bytes,
        folder: StorageFolder,
        filename: str,
        content_type: str,
    ) -> str:
        """Upload file to S3."""
        key = f"{folder}/{filename}"

        if not self.s3_client:
            raise RuntimeError("S3 client not initialized")

        try:
            logger.info(f"Uploading to S3: bucket={settings.S3_BUCKET_NAME}, key={key}")
            self.s3_client.put_object(
                Bucket=settings.S3_BUCKET_NAME,
                Key=key,
                Body=content,
                ContentType=content_type,
            )
            logger.info(f"Successfully uploaded to S3: {key}")
            return key
        except (ClientError, BotoCoreError) as e:
            logger.error(f"S3 upload error: {e}")
            raise RuntimeError(f"Failed to upload to S3: {e}") from e

    async def _upload_to_local(
        self,
        content: bytes,
        folder: StorageFolder,
        filename: str,
    ) -> str:
        """Upload file to local filesystem."""
        filepath = Path(settings.UPLOAD_DIR) / folder / filename
        filepath.parent.mkdir(parents=True, exist_ok=True)

        with open(filepath, "wb") as f:
            f.write(content)

        return str(filepath)

    def get_url(self, path: str | None, folder: StorageFolder) -> str | None:
        """
        Get public URL for a file.

        Args:
            path: File path/key (can be full path or just filename)
            folder: Storage folder (avatars, covers, proofs)

        Returns:
            Public URL or None if path is None
        """
        if not path:
            return None

        # Extract filename from path
        filename = path.split("/")[-1]

        if settings.S3_ENABLED:
            # S3 URL
            return f"{settings.S3_PUBLIC_URL}/{folder}/{filename}"
        else:
            # Local URL
            return f"/uploads/{folder}/{filename}"
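
    # Illustrative get_url results (placeholders, not real values):
    #   S3 enabled:   "<S3_PUBLIC_URL>/avatars/42_<uuid hex>.jpg"
    #   S3 disabled:  "/uploads/avatars/42_<uuid hex>.jpg"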

    async def delete_file(self, path: str | None) -> bool:
        """
        Delete file from storage.

        Args:
            path: File path/key

        Returns:
            True if deleted, False otherwise
        """
        if not path:
            return False

        if settings.S3_ENABLED:
            return await self._delete_from_s3(path)
        else:
            return await self._delete_from_local(path)

    async def _delete_from_s3(self, key: str) -> bool:
        """Delete file from S3."""
        # Guard against a failed lazy init: s3_client can be None, and calling
        # delete_object on None would raise AttributeError, not ClientError.
        if not self.s3_client:
            logger.error("S3 client not initialized")
            return False

        try:
            self.s3_client.delete_object(
                Bucket=settings.S3_BUCKET_NAME,
                Key=key,
            )
            return True
        except ClientError:
            return False

    async def _delete_from_local(self, path: str) -> bool:
        """Delete file from local filesystem."""
        try:
            filepath = Path(path)
            if filepath.exists():
                filepath.unlink()
                return True
            return False
        except Exception:
            return False

    async def get_file(
        self,
        path: str,
        folder: StorageFolder,
    ) -> tuple[bytes, str] | None:
        """
        Get file content from storage.

        Args:
            path: File path/key (can be full path or just filename)
            folder: Storage folder

        Returns:
            Tuple of (content bytes, content_type) or None if not found
        """
        if not path:
            return None

        # Extract filename from path
        filename = path.split("/")[-1]

        if settings.S3_ENABLED:
            return await self._get_from_s3(folder, filename)
        else:
            return await self._get_from_local(folder, filename)

    async def _get_from_s3(
        self,
        folder: StorageFolder,
        filename: str,
    ) -> tuple[bytes, str] | None:
        """Get file from S3."""
        key = f"{folder}/{filename}"

        if not self.s3_client:
            logger.error("S3 client not initialized")
            return None

        try:
            response = self.s3_client.get_object(
                Bucket=settings.S3_BUCKET_NAME,
                Key=key,
            )
            content = response["Body"].read()
            content_type = response.get("ContentType", "application/octet-stream")
            return content, content_type
        except ClientError as e:
            logger.error(f"S3 get error for {key}: {e}")
            return None

    async def _get_from_local(
        self,
        folder: StorageFolder,
        filename: str,
    ) -> tuple[bytes, str] | None:
        """Get file from local filesystem."""
        filepath = Path(settings.UPLOAD_DIR) / folder / filename

        if not filepath.exists():
            return None

        try:
            with open(filepath, "rb") as f:
                content = f.read()

            # Determine content type from extension
            ext = filename.rsplit(".", 1)[-1].lower() if "." in filename else ""
            content_types = {
                "jpg": "image/jpeg",
                "jpeg": "image/jpeg",
                "png": "image/png",
                "gif": "image/gif",
                "webp": "image/webp",
                "mp4": "video/mp4",
                "webm": "video/webm",
                "mov": "video/quicktime",
            }
            content_type = content_types.get(ext, "application/octet-stream")

            return content, content_type
        except Exception as e:
            logger.error(f"Local get error for {filepath}: {e}")
            return None


# Singleton instance
storage_service = StorageService()
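
# Usage sketch (illustrative only; the bytes, prefix, and filename below are
# made up and this block is not executed as part of the module):
#
#     import asyncio
#
#     async def _demo() -> None:
#         filename = storage_service.generate_filename(42, "photo.PNG")
#         key = await storage_service.upload_file(
#             b"<image bytes>", "avatars", filename, content_type="image/png"
#         )
#         print(storage_service.get_url(key, "avatars"))
#         await storage_service.delete_file(key)
#
#     asyncio.run(_demo())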