diff --git a/data/html/index.html b/data/html/index.html
index 6db5796..470b139 100644
--- a/data/html/index.html
+++ b/data/html/index.html
@@ -25,6 +25,7 @@
+
@@ -72,6 +73,17 @@
const linkContainer = document.getElementById("linkContainer");
const uploadedLink = document.getElementById("uploadedLink");
const clipboardMessage = document.getElementById("clipboardMessage");
+ const uploadError = document.getElementById("uploadError");
+
+ function formatRetryAfter(seconds) {
+ const safeSeconds = Math.max(0, Math.floor(seconds));
+ const minutes = Math.floor(safeSeconds / 60);
+ const remainder = safeSeconds % 60;
+ if (minutes > 0) {
+ return `${minutes}m ${remainder}s`;
+ }
+ return `${remainder}s`;
+ }
// Update duration display
durationSlider.addEventListener("input", function () {
@@ -115,6 +127,8 @@
};
try {
+ uploadError.style.display = "none";
+ uploadError.textContent = "";
const response = await fetch("/api/upload", {
method: "POST",
headers: {
@@ -123,10 +137,33 @@
body: JSON.stringify(payload),
});
- const result = await response.json();
- console.log(
- `✅ Upload received!\n${JSON.stringify(result, null, 2)}`
- );
+ let result = null;
+ try {
+ result = await response.json();
+ } catch (parseError) {
+ result = null;
+ }
+
+ if (!response.ok) {
+ const retryAfterSeconds = result && Number.isFinite(Number(result.retry_after_seconds))
+ ? Number(result.retry_after_seconds)
+ : null;
+ let errorMessage =
+ (result && result.error) ||
+ `Upload failed (${response.status})`;
+ if (retryAfterSeconds !== null) {
+ errorMessage += ` Try again in ${formatRetryAfter(retryAfterSeconds)}.`;
+ }
+ uploadError.textContent = errorMessage;
+ uploadError.style.display = "block";
+ return;
+ }
+
+ if (!result || !result.link) {
+ uploadError.textContent = "Upload failed (invalid response)";
+ uploadError.style.display = "block";
+ return;
+ }
// Hide duration controls and buttons
document.querySelector('label[for="durationSlider"]').style.display =
@@ -175,7 +212,8 @@
};
}
} catch (error) {
- console.log(`❌ Error: ${error.message}`);
+ uploadError.textContent = `Upload failed (${error.message})`;
+ uploadError.style.display = "block";
}
}
@@ -201,6 +239,8 @@
resetBtn.style.display = "none";
linkContainer.style.display = "none";
clipboardMessage.style.display = "none";
+ uploadError.style.display = "none";
+ uploadError.textContent = "";
uploadZone.focus();
});
diff --git a/data/html/style.css b/data/html/style.css
index c5de58b..1d72a0b 100644
--- a/data/html/style.css
+++ b/data/html/style.css
@@ -91,6 +91,18 @@ h1 .home-link:hover {
transition: all 0.3s ease;
}
+.upload-error {
+ margin: 12px 0 0 0;
+ padding: 10px 12px;
+ border: 1px solid #ff6666;
+ border-radius: 10px;
+ background-color: rgba(255, 102, 102, 0.12);
+ color: #ff6666;
+ font-size: 0.9em;
+ text-align: center;
+ box-shadow: 0 4px 12px rgba(255, 102, 102, 0.15);
+}
+
.duration-container .button-row {
display: flex;
flex-direction: row;
diff --git a/src/api.rs b/src/api.rs
index ff45fc9..3785c7f 100644
--- a/src/api.rs
+++ b/src/api.rs
@@ -1,6 +1,8 @@
+use actix_web::http::header;
use actix_web::{HttpRequest, HttpResponse, get, post, web};
use base64::{Engine, engine::general_purpose};
+use chrono::Utc;
use serde::Deserialize;
use serde_json::json;
@@ -9,6 +11,7 @@ use crate::{
data_mgt::{AppState, Asset},
logs::{LogEvent, LogEventType, log_event},
};
+use crate::{MAX_ASSET_DURATION, MIN_ASSET_DURATION};
#[derive(Deserialize, Debug)]
pub struct UploadRequest {
@@ -24,6 +27,8 @@ async fn api_upload(
app_state: web::Data<AppState>,
) -> Result<HttpResponse, actix_web::Error> {
// Check for rate limiting
+ let now = Utc::now().timestamp_millis();
+
let connection_info = req.connection_info();
let uploader_ip = connection_info
@@ -32,25 +37,34 @@ async fn api_upload(
.or_else(|| connection_info.peer_addr().map(|value| value.to_string()))
.ok_or_else(|| actix_web::error::ErrorBadRequest("Cannot determine client ip"))?;
- match app_state.connection_tracker.is_allowed(&uploader_ip).await {
- true => {}
- false => {
- return Ok(HttpResponse::TooManyRequests().body("Upload limit exceeded"));
- }
- }
-
// Convert to bytes
let content_bytes = if body.content_type == "text/plain" {
body.content.as_bytes().to_vec()
} else {
match general_purpose::STANDARD.decode(&body.content) {
Ok(bytes) => bytes,
- Err(_) => return Ok(HttpResponse::BadRequest().body("Invalid base64 payload")),
+ Err(_) => return Ok(HttpResponse::BadRequest().json(json!({ "error": "Invalid base64 content" }))),
}
};
+ let clamped_duration = body.duration.clamp(MIN_ASSET_DURATION, MAX_ASSET_DURATION);
+ let asset_expiration_time = now + (clamped_duration as i64 * 60 * 1000);
+ let (allowed, retry_after_ms) = app_state
+ .connection_tracker
+ .check(&uploader_ip, asset_expiration_time)
+ .await;
+ if !allowed {
+ let retry_after_seconds = retry_after_ms.map(|ms| ((ms + 999) / 1000).max(1));
+ let response_body = match retry_after_seconds {
+ Some(seconds) => json!({ "error": "Upload limit exceeded", "retry_after_seconds": seconds }),
+ None => json!({ "error": "Upload limit exceeded" }),
+ };
+ // return Ok(HttpResponse::TooManyRequests().body("Upload limit exceeded"));
+ return Ok(HttpResponse::TooManyRequests().json(response_body));
+ }
+
let asset = crate::data_mgt::Asset::new(
- body.duration,
+ clamped_duration,
body.content_type.clone(),
content_bytes,
Some(uploader_ip.clone()),
diff --git a/src/data_mgt.rs b/src/data_mgt.rs
index c6fa128..3a1b8ba 100644
--- a/src/data_mgt.rs
+++ b/src/data_mgt.rs
@@ -9,7 +9,7 @@ use serde_json::Value;
use crate::MAX_ASSETS;
use crate::{
- MAX_UPLOADS_PER_HOUR_PER_USER,
+ MAX_UPLOADS_PER_USER,
logs::{LogEventType, log_event},
};
@@ -182,19 +182,19 @@ pub struct RateLimiter {
}
impl RateLimiter {
- pub async fn is_allowed(&self, client_ip: &str) -> bool {
- let mut clients = self.clients.lock().await;
+ pub async fn check(&self, client_ip: &str, asset_exp_time: i64) -> (bool, Option<i64>) {
+ self.clear_expired().await;
let now = Utc::now().timestamp_millis();
- let one_hour_ago = now - Duration::hours(1).num_milliseconds();
-
+ let mut clients = self.clients.lock().await;
let entry = clients.entry(client_ip.to_string()).or_insert_with(Vec::new);
- entry.retain(|&timestamp| timestamp > one_hour_ago);
- let ret_val = if entry.len() < MAX_UPLOADS_PER_HOUR_PER_USER {
- entry.push(now);
- true
+ let ret_val = if entry.len() < MAX_UPLOADS_PER_USER {
+ entry.push(asset_exp_time);
+ (true, None)
} else {
- false
+ let first_to_expire = entry.iter().min().copied().unwrap();
+ let retry_after_ms = (first_to_expire - now).max(1);
+ (false, Some(retry_after_ms))
};
println!("{:?}", clients);
ret_val
@@ -203,10 +203,9 @@ impl RateLimiter {
pub async fn clear_expired(&self) {
let mut clients = self.clients.lock().await;
let now = Utc::now().timestamp_millis();
- let one_hour_ago = now - Duration::hours(1).num_milliseconds();
for timestamps in clients.values_mut() {
- timestamps.retain(|&timestamp| timestamp > one_hour_ago);
+ timestamps.retain(|&timestamp| timestamp > now);
}
}
}
diff --git a/src/main.rs b/src/main.rs
index 9802a33..ed09374 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -14,9 +14,11 @@ use std::{env, fs, path::PathBuf, sync::LazyLock};
pub static HTML_DIR: &str = "data/html/";
pub static LOG_DIR: &str = "data/logs/";
pub static LOG_FILE_NAME: &str = "log.txt";
+pub static MIN_ASSET_DURATION: u32 = 1; // in minutes
+pub static MAX_ASSET_DURATION: u32 = 60; // in minutes
pub static MAX_ASSETS: usize = 1000;
pub static MAX_ASSET_SIZE_BYTES: usize = 3 * 1024 * 1024; // 3 MB
-pub static MAX_UPLOADS_PER_HOUR_PER_USER: usize = 10;
+pub static MAX_UPLOADS_PER_USER: usize = 10;
pub static BIND_ADDR: LazyLock<String> = LazyLock::new(|| match env::var("BIND_ADDR") {
Ok(addr) => {