diff --git a/backend/src/main.rs b/backend/src/main.rs
index ff60909..d5d272b 100644
--- a/backend/src/main.rs
+++ b/backend/src/main.rs
@@ -11,7 +11,7 @@ use tower_http::{
     cors::{Any, CorsLayer},
     services::ServeDir,
 };
-use tracing::{error, info};
+use tracing::{error, info, warn};
 
 #[derive(Clone)]
 struct AppState {
@@ -62,6 +62,7 @@ struct CreatePostRequest {
 #[derive(Serialize)]
 struct ErrorResponse {
     error: String,
+    details: Option<String>,
 }
 
 #[derive(Serialize)]
@@ -85,11 +86,17 @@ async fn main() {
     let data_dir_str = env::var("DATA_DIR").unwrap_or_else(|_| "../data".to_string());
     let data_dir = PathBuf::from(data_dir_str);
 
+    info!("Initializing backend with data dir: {:?}", data_dir);
+
     // Ensure directories exist
     let posts_dir = data_dir.join("posts");
     let uploads_dir = data_dir.join("uploads");
-    fs::create_dir_all(&posts_dir).expect("Failed to create posts directory");
-    fs::create_dir_all(&uploads_dir).expect("Failed to create uploads directory");
+    if let Err(e) = fs::create_dir_all(&posts_dir) {
+        error!("Failed to create posts directory: {}", e);
+    }
+    if let Err(e) = fs::create_dir_all(&uploads_dir) {
+        error!("Failed to create uploads directory: {}", e);
+    }
 
     let state = Arc::new(AppState {
         admin_token,
@@ -108,7 +115,7 @@ async fn main() {
         .route("/api/uploads", get(list_uploads))
         .route("/api/upload", post(upload_file))
         .nest_service("/uploads", ServeDir::new(uploads_dir))
-        .layer(DefaultBodyLimit::max(50 * 1024 * 1024)) // 50MB limit
+        .layer(DefaultBodyLimit::max(50 * 1024 * 1024))
         .layer(cors)
         .with_state(state);
 
@@ -121,9 +128,10 @@ async fn main() {
 fn check_auth(headers: &HeaderMap, admin_token: &str) -> Result<(), (StatusCode, Json<ErrorResponse>)> {
     let auth_header = headers.get("Authorization").and_then(|h| h.to_str().ok());
     if auth_header != Some(&format!("Bearer {}", admin_token)) {
+        warn!("Unauthorized access attempt detected");
         return Err((
             StatusCode::UNAUTHORIZED,
-            Json(ErrorResponse { error: "Unauthorized".to_string() }),
+            Json(ErrorResponse { error: "Unauthorized".to_string(), details: None }),
         ));
     }
     Ok(())
@@ -147,12 +155,14 @@ async fn update_config(
     check_auth(&headers, &state.admin_token)?;
 
     let config_path = state.data_dir.join("config.json");
-    let config_str = serde_json::to_string_pretty(&payload).map_err(|_| {
-        (StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse { error: "Serialization error".to_string() }))
+    let config_str = serde_json::to_string_pretty(&payload).map_err(|e| {
+        error!("Serialization error: {}", e);
+        (StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse { error: "Serialization error".to_string(), details: Some(e.to_string()) }))
     })?;
 
-    fs::write(&config_path, config_str).map_err(|_| {
-        (StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse { error: "Write error".to_string() }))
+    fs::write(&config_path, config_str).map_err(|e| {
+        error!("Write error for config: {}", e);
+        (StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse { error: "Write error".to_string(), details: Some(e.to_string()) }))
     })?;
 
     Ok(Json(payload))
@@ -165,20 +175,21 @@ async fn create_post(
 ) -> Result<Json<PostDetail>, (StatusCode, Json<ErrorResponse>)> {
     check_auth(&headers, &state.admin_token)?;
 
-    // Validate slug to prevent directory traversal
     if payload.slug.contains('/') || payload.slug.contains('\\') || payload.slug.contains("..") {
         return Err((
             StatusCode::BAD_REQUEST,
-            Json(ErrorResponse { error: "Invalid slug".to_string() }),
+            Json(ErrorResponse { error: "Invalid slug".to_string(), details: None }),
         ));
     }
 
     let file_path = state.data_dir.join("posts").join(format!("{}.md", payload.slug));
-    fs::write(&file_path, &payload.content).map_err(|_| {
-        (StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse { error: "Write error".to_string() }))
+    fs::write(&file_path, &payload.content).map_err(|e| {
+        error!("Write error for post {}: {}", payload.slug, e);
+        (StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse { error: "Write error".to_string(), details: Some(e.to_string()) }))
     })?;
 
+    info!("Post created/updated: {}", payload.slug);
     Ok(Json(PostDetail {
         slug: payload.slug,
         content: payload.content,
@@ -193,26 +204,23 @@ async fn delete_post(
     check_auth(&headers, &state.admin_token)?;
 
     let file_path = state.data_dir.join("posts").join(format!("{}.md", slug));
+    info!("Attempting to delete post at: {:?}", file_path);
 
-    // Security check to prevent directory traversal
-    if file_path.parent() != Some(&state.data_dir.join("posts")) {
+    if !file_path.exists() {
+        warn!("Post not found for deletion: {}", slug);
         return Err((
-            StatusCode::BAD_REQUEST,
-            Json(ErrorResponse { error: "Invalid slug".to_string() }),
+            StatusCode::NOT_FOUND,
+            Json(ErrorResponse { error: "Post not found".to_string(), details: None }),
         ));
     }
 
-    if file_path.exists() {
-        fs::remove_file(file_path).map_err(|_| {
-            (StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse { error: "Delete error".to_string() }))
-        })?;
-        Ok(StatusCode::NO_CONTENT)
-    } else {
-        Err((
-            StatusCode::NOT_FOUND,
-            Json(ErrorResponse { error: "Post not found".to_string() }),
-        ))
-    }
+    fs::remove_file(file_path).map_err(|e| {
+        error!("Delete error for post {}: {}", slug, e);
+        (StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse { error: "Delete error".to_string(), details: Some(e.to_string()) }))
+    })?;
+
+    info!("Post deleted: {}", slug);
+    Ok(StatusCode::NO_CONTENT)
 }
 
 async fn list_uploads(
@@ -267,19 +275,11 @@ async fn get_post(
 ) -> Result<Json<PostDetail>, (StatusCode, Json<ErrorResponse>)> {
     let file_path = state.data_dir.join("posts").join(format!("{}.md", slug));
 
-    // Security check to prevent directory traversal
-    if file_path.parent() != Some(&state.data_dir.join("posts")) {
-        return Err((
-            StatusCode::BAD_REQUEST,
-            Json(ErrorResponse { error: "Invalid slug".to_string() }),
-        ));
-    }
-
     match fs::read_to_string(&file_path) {
         Ok(content) => Ok(Json(PostDetail { slug, content })),
         Err(_) => Err((
             StatusCode::NOT_FOUND,
-            Json(ErrorResponse { error: "Post not found".to_string() }),
+            Json(ErrorResponse { error: "Post not found".to_string(), details: None }),
         )),
     }
 }
@@ -291,38 +291,63 @@ async fn upload_file(
 ) -> Result<Json<UploadResponse>, (StatusCode, Json<ErrorResponse>)> {
     check_auth(&headers, &state.admin_token)?;
 
+    info!("Upload requested");
+
     while let Ok(Some(field)) = multipart.next_field().await {
-        if let Some(file_name) = field.file_name() {
-            let file_name = slug::slugify(file_name);
-            let uploads_dir = state.data_dir.join("uploads");
-            let file_path = uploads_dir.join(&file_name);
+        let file_name = match field.file_name() {
+            Some(name) => name.to_string(),
+            None => continue,
+        };
 
-            // Simple conflict resolution
-            let final_path = if file_path.exists() {
-                let timestamp = chrono::Utc::now().timestamp();
-                uploads_dir.join(format!("{}_{}", timestamp, file_name))
-            } else {
-                file_path
-            };
+        info!("Processing upload for: {}", file_name);
+        let slugified_name = slug::slugify(&file_name);
+
+        // Handle extension correctly after slugifying
+        let extension = std::path::Path::new(&file_name)
+            .extension()
+            .and_then(|e| e.to_str())
+            .unwrap_or("");
+
+        let final_name = if !extension.is_empty() {
+            format!("{}.{}", slugified_name, extension)
+        } else {
+            slugified_name
+        };
 
-            let final_name = final_path.file_name().unwrap().to_str().unwrap().to_string();
+        let uploads_dir = state.data_dir.join("uploads");
+        let file_path = uploads_dir.join(&final_name);
 
-            let data = field.bytes().await.map_err(|_| {
-                (StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse { error: "Read error".to_string() }))
-            })?;
+        let final_path = if file_path.exists() {
+            let timestamp = chrono::Utc::now().timestamp();
+            uploads_dir.join(format!("{}_{}", timestamp, final_name))
+        } else {
+            file_path
+        };
 
-            fs::write(&final_path, &data).map_err(|_| {
-                (StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse { error: "Write error".to_string() }))
-            })?;
+        let final_name_str = final_path.file_name().unwrap().to_str().unwrap().to_string();
 
-            return Ok(Json(UploadResponse {
-                url: format!("/uploads/{}", final_name),
-            }));
+        let data = match field.bytes().await {
+            Ok(bytes) => bytes,
+            Err(e) => {
+                error!("Failed to read multipart bytes: {}", e);
+                return Err((StatusCode::BAD_REQUEST, Json(ErrorResponse { error: "Read error".to_string(), details: Some(e.to_string()) })));
+            }
+        };
+
+        if let Err(e) = fs::write(&final_path, &data) {
+            error!("Failed to write file to {:?}: {}", final_path, e);
+            return Err((StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse { error: "Write error".to_string(), details: Some(e.to_string()) })));
         }
+
+        info!("File uploaded successfully to {:?}", final_path);
+        return Ok(Json(UploadResponse {
+            url: format!("/uploads/{}", final_name_str),
+        }));
     }
 
+    warn!("Upload failed: no file found in multipart stream");
     Err((
         StatusCode::BAD_REQUEST,
-        Json(ErrorResponse { error: "No file found".to_string() }),
+        Json(ErrorResponse { error: "No file found".to_string(), details: None }),
     ))
 }
diff --git a/frontend/src/pages/api/[...path].ts b/frontend/src/pages/api/[...path].ts
index db0aa7c..b9c1744 100644
--- a/frontend/src/pages/api/[...path].ts
+++ b/frontend/src/pages/api/[...path].ts
@@ -4,29 +4,51 @@ export const ALL: APIRoute = async ({ request, params }) => {
   const API_URL = process.env.PUBLIC_API_URL || 'http://backend:3000';
   const path = params.path;
 
+  if (!path) {
+    return new Response(JSON.stringify({ error: 'Missing path' }), { status: 400 });
+  }
+
   const url = new URL(`${API_URL}/api/${path}`);
-
-  // Forward search parameters
   const requestUrl = new URL(request.url);
   url.search = requestUrl.search;
 
+  // Filter headers to avoid conflicts with the backend container
+  const headers = new Headers();
+  const forbiddenHeaders = ['host', 'connection', 'origin', 'referer', 'content-length'];
+
+  request.headers.forEach((value, key) => {
+    if (!forbiddenHeaders.includes(key.toLowerCase())) {
+      headers.set(key, value);
+    }
+  });
+
   try {
     const response = await fetch(url.toString(), {
       method: request.method,
-      headers: request.headers,
-      body: request.method !== 'GET' && request.method !== 'HEAD' ? await request.arrayBuffer() : undefined,
-      // @ts-ignore
+      headers: headers,
+      // Pass the body stream directly for efficiency
+      body: request.method !== 'GET' && request.method !== 'HEAD' ? request.body : undefined,
+      // @ts-ignore - duplex is required for streaming bodies in some fetch implementations
       duplex: 'half'
     });
 
+    // Extract headers for the response
+    const responseHeaders = new Headers();
+    response.headers.forEach((value, key) => {
+      responseHeaders.set(key, value);
+    });
+
     return new Response(response.body, {
       status: response.status,
-      headers: response.headers
+      headers: responseHeaders
    });
   } catch (e) {
-    console.error(`Proxy error for ${url}:`, e);
-    return new Response(JSON.stringify({ error: 'Proxy error' }), {
-      status: 500,
+    console.error(`[Proxy Error] ${request.method} ${url}:`, e);
+    return new Response(JSON.stringify({
+      error: 'Proxy connection failed',
+      details: e instanceof Error ? e.message : String(e)
+    }), {
+      status: 502,
       headers: { 'Content-Type': 'application/json' }
     });
   }