8 Commits
v0.3.0 ... main

Author SHA1 Message Date
ffa1194ed6 Release v1.1.0
All checks were successful
Build & Publish / build_publish (push) Successful in 3m14s
2026-01-25 11:12:54 +01:00
bb8bbf8a24 feat: enhance logging and error handling, update HTML templates, and add footer 2026-01-25 11:07:44 +01:00
f403b6549d Release v1.0.1 - Bug fixes for logging and upload UI
All checks were successful
Build & Publish / build_publish (push) Successful in 1m29s
2026-01-16 16:38:12 +01:00
cfbd9ff4d3 feat: release v0.3.1 with bug fixes for upload workflow and logging improvements 2026-01-16 16:33:41 +01:00
c7c5c5f135 feat: enhance upload handling and logging improvements 2026-01-16 15:17:57 +01:00
e0d1f263dd chore: release v1.0.0
All checks were successful
Build & Publish / build_publish (push) Successful in 1m30s
2026-01-16 11:29:22 +01:00
1d75df2d41 feat: implement upload error handling and rate limiting improvements 2026-01-16 11:23:14 +01:00
e90c4576a5 feat: update dependencies, enhance upload rate limiting, and improve UI elements 2026-01-16 08:54:14 +01:00
17 changed files with 589 additions and 350 deletions

View File

@@ -5,6 +5,52 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [1.1.0] - 2026-01-25
### Added
- Shared HTML footer template injected into all pages.
- Structured log file handler with rotation, append-only writes, and stats parsing support.
- Upload size guard using `MAX_ASSET_SIZE_BYTES`.
- Unit tests for core storage and rate limiting behavior.
### Changed
- Asset storage cleanup and capacity checks are now race-safe.
- Stats aggregation now reads from the structured log file helper.
## [1.0.1] - 2026-01-16
### Fixed
- Asset addition logging now displays immediately instead of being buffered (changed `print!` to `println!`).
- Upload flow now blocks further uploads after the first successful upload — users can only upload once per page session.
- Paste, drag & drop, and file selection disabled after successful upload to prevent confusion.
- JavaScript syntax errors in event listener registration that prevented copy/paste functionality.
- Removed nested and duplicated event listeners that caused unexpected behavior.
### Changed
- Added `uploadCompleted` flag to track upload state and prevent multiple uploads per session.
- Reset button now properly clears the `uploadCompleted` flag to allow new uploads.
## [1.0.0] - 2026-01-14
### Added
- UI error banner for failed uploads, including retry timing.
- `retry_after_seconds` in the upload error response to inform clients when to retry.
- Server-side duration clamping for uploads (1-60 minutes).
### Changed
- Upload throttling now tracks active assets per user using asset expiration times.
- Upload error responses are consistently JSON.
### Removed
- `Retry-After` response header on upload limit errors.
## [0.3.0] - 2026-01-13
### Added

85
Cargo.lock generated
View File

@@ -273,7 +273,7 @@ checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
[[package]]
name = "black_hole_share"
version = "0.3.0"
version = "1.1.0"
dependencies = [
"actix-files",
"actix-web",
@@ -281,6 +281,7 @@ dependencies = [
"base64",
"chrono",
"futures",
"mime_guess",
"serde",
"serde_json",
"tokio",
@@ -340,9 +341,9 @@ dependencies = [
[[package]]
name = "cc"
version = "1.2.51"
version = "1.2.52"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a0aeaff4ff1a90589618835a598e545176939b97874f7abc7851caa0618f203"
checksum = "cd4932aefd12402b36c60956a4fe0035421f544799057659ff86f923657aada3"
dependencies = [
"find-msvc-tools",
"jobserver",
@@ -503,15 +504,15 @@ dependencies = [
[[package]]
name = "find-msvc-tools"
version = "0.1.6"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "645cbb3a84e60b7531617d5ae4e57f7e27308f6445f5abf653209ea76dec8dff"
checksum = "f449e6c6c08c865631d4890cfacf252b3d396c9bcc83adb6623cdb02a8336c41"
[[package]]
name = "flate2"
version = "1.1.5"
version = "1.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb"
checksum = "b375d6465b98090a5f25b1c7703f3859783755aa9a80433b36e0379a3ec2f369"
dependencies = [
"crc32fast",
"miniz_oxide",
@@ -837,9 +838,9 @@ checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2"
[[package]]
name = "indexmap"
version = "2.12.1"
version = "2.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2"
checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017"
dependencies = [
"equivalent",
"hashbrown",
@@ -879,9 +880,9 @@ checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388"
[[package]]
name = "libc"
version = "0.2.178"
version = "0.2.180"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091"
checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc"
[[package]]
name = "litemap"
@@ -1059,18 +1060,18 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.104"
version = "1.0.105"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9695f8df41bb4f3d222c95a67532365f569318332d03d5f3f67f37b20e6ebdf0"
checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.42"
version = "1.0.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f"
checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a"
dependencies = [
"proc-macro2",
]
@@ -1103,9 +1104,9 @@ dependencies = [
[[package]]
name = "rand_core"
version = "0.9.3"
version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c"
dependencies = [
"getrandom",
]
@@ -1219,9 +1220,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.148"
version = "1.0.149"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3084b546a1dd6289475996f182a22aba973866ea8e8b02c51d9f46b1336a22da"
checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86"
dependencies = [
"itoa",
"memchr",
@@ -1315,9 +1316,9 @@ checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596"
[[package]]
name = "syn"
version = "2.0.112"
version = "2.0.114"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21f182278bf2d2bcb3c88b1b08a37df029d71ce3d3ae26168e3c653b213b99d4"
checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a"
dependencies = [
"proc-macro2",
"quote",
@@ -1337,30 +1338,30 @@ dependencies = [
[[package]]
name = "time"
version = "0.3.44"
version = "0.3.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d"
checksum = "f9e442fc33d7fdb45aa9bfeb312c095964abdf596f7567261062b2a7107aaabd"
dependencies = [
"deranged",
"itoa",
"num-conv",
"powerfmt",
"serde",
"serde_core",
"time-core",
"time-macros",
]
[[package]]
name = "time-core"
version = "0.1.6"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b"
checksum = "8b36ee98fd31ec7426d599183e8fe26932a8dc1fb76ddb6214d05493377d34ca"
[[package]]
name = "time-macros"
version = "0.2.24"
version = "0.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3"
checksum = "71e552d1249bf61ac2a52db88179fd0673def1e1ad8243a00d9ec9ed71fee3dd"
dependencies = [
"num-conv",
"time-core",
@@ -1378,9 +1379,9 @@ dependencies = [
[[package]]
name = "tokio"
version = "1.48.0"
version = "1.49.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408"
checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86"
dependencies = [
"bytes",
"libc",
@@ -1406,9 +1407,9 @@ dependencies = [
[[package]]
name = "tokio-util"
version = "0.7.17"
version = "0.7.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594"
checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098"
dependencies = [
"bytes",
"futures-core",
@@ -1457,9 +1458,9 @@ checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb"
[[package]]
name = "unicase"
version = "2.8.1"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539"
checksum = "dbc4bc3a9f746d862c45cb89d705aa10f187bb96c76001afab07a0d35ce60142"
[[package]]
name = "unicode-ident"
@@ -1481,9 +1482,9 @@ checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
[[package]]
name = "url"
version = "2.5.7"
version = "2.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b"
checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed"
dependencies = [
"form_urlencoded",
"idna",
@@ -1832,18 +1833,18 @@ dependencies = [
[[package]]
name = "zerocopy"
version = "0.8.31"
version = "0.8.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3"
checksum = "668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.8.31"
version = "0.8.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a"
checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1"
dependencies = [
"proc-macro2",
"quote",
@@ -1906,9 +1907,9 @@ dependencies = [
[[package]]
name = "zmij"
version = "1.0.7"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de9211a9f64b825911bdf0240f58b7a8dac217fe260fc61f080a07f61372fbd5"
checksum = "bd8f3f50b848df28f887acb68e41201b5aea6bc8a8dacc00fb40635ff9a72fea"
[[package]]
name = "zstd"

View File

@@ -1,6 +1,6 @@
[package]
name = "black_hole_share"
version = "0.3.0"
version = "1.1.0"
edition = "2024"
[dependencies]
@@ -11,11 +11,7 @@ chrono = "0.4"
futures = "0.3.31"
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.148"
tokio = { version = "1.48.0", features = [
"macros",
"rt-multi-thread",
"signal",
"time",
] }
tokio = { version = "1.48.0", features = ["fs", "macros", "rt-multi-thread", "signal", "time"] }
uuid = { version = "1.19.0", features = ["v4"] }
base64 = "0.22.1"
mime_guess = "2.0.5"

117
README.md
View File

@@ -2,35 +2,17 @@
A lightweight, ephemeral file sharing service built with Rust and Actix-Web. Upload images or text with a configurable TTL (1-60 minutes) and share via a unique link. Content is automatically purged after expiration.
## Features
## Usage
- **Ephemeral sharing** — uploads auto-delete after the specified duration
- **Image & text support** — drag/drop, paste, or file picker for images; paste text directly
- **Zero database** — assets stored as JSON files on disk
- **Dark theme UI** — clean, responsive interface with zoom overlay
- **Statistics dashboard** — real-time stats at `/stats` (active assets, uploads, response times)
- **Access logging** — request and asset events logged to `data/logs/log.txt` with IP, timing, and metadata
- **Code-friendly text view** — code-like text content auto-formats with syntax highlighting
- **Site assets** — favicon set and web manifest for installable branding
## Quick Start
### Local Development
### Run locally
```bash
# Run from repo root (paths resolve to data/html/, data/logs/, data/storage/)
cargo run --release
```
Server starts at `http://0.0.0.0:8080` by default.
> **Note:** All paths are relative to the repo root: `data/html/`, `data/logs/`, `data/storage/`.
### Toolchain
Rust toolchain is pinned in `rust-toolchain.toml` (current: 1.90.0).
### Docker
### Run with Docker
```bash
docker-compose up --build
@@ -38,95 +20,26 @@ docker-compose up --build
Exposes port `8080` mapped to container port `80`. Volume mounts `./data:/data`.
## Configuration
### Configuration
| Environment Variable | Default | Description |
| -------------------- | --------- | --------------- |
| `BIND_ADDR` | `0.0.0.0` | Address to bind |
| `BIND_PORT` | `8080` | Port to bind |
## API
### Web
### Upload
- `GET /` - Upload page
- `GET /stats` - Stats dashboard
- `GET /bhs/{id}` - View shared content
```http
POST /api/upload
Content-Type: application/json
### API
{
"duration": 5,
"content_type": "text/plain",
"content": "Hello, world!"
}
```
- `POST /api/upload` with JSON `{ duration, content_type, content }`
- `GET /api/content/{id}`
- `GET /api/stats`
- `duration` TTL in minutes (1-60)
- `content_type` MIME type (`text/plain` or `image/*`)
- `content` raw text or base64-encoded image data
### Logging
**Response:**
```json
{ "link": "/bhs/550e8400-e29b-41d4-a716-446655440000" }
```
### Fetch
```http
GET /api/content/{id}
```
Returns the original content with appropriate MIME type, or `404` if expired/missing.
### Statistics
```http
GET /api/stats
```
**Response:**
```json
{
"active_assets": 5,
"total_uploads": 42,
"total_deleted": 37,
"storage_bytes": 1048576,
"image_count": 3,
"text_count": 2,
"total_requests": 150,
"recent_activity": [...]
}
```
## Project Structure
```
├── src/
│ ├── main.rs # HTTP server, routing, background cleanup
│ ├── api.rs # Upload/fetch endpoints
│ ├── data_mgt.rs # Asset model, persistence, expiration
│ └── logs.rs # Request and asset event logging
├── data/
│ ├── html/ # Frontend (index.html, view.html, stats.html, style.css)
│ ├── logs/ # Access logs
│ └── storage/ # Stored assets (auto-created)
├── Dockerfile
├── docker-compose.yaml
└── Cargo.toml
```
## Runtime Layout
The server uses paths relative to the repo root under `data/`:
- `data/html/` frontend assets (index.html, view.html, style.css)
- `data/logs/` access logs (`log.txt`, rotated on startup with timestamps)
- `data/storage/` uploaded assets (auto-created)
- **Local dev:** Run from repo root with `cargo run --release`
- **Docker:** Volume mounts `./data:/data`, container WORKDIR is `/`
## License
MIT
- Logs are written to `data/logs/log.txt`.
- On startup, the previous log file is rotated with a timestamped name.

View File

@@ -24,17 +24,7 @@
</div>
</div>
<footer class="powered-by" style="display: flex; align-items: center">
<span style="flex: 1"></span>
<span>Powered by: <img src="/logo.png" alt="ICSBox" class="footer-logo" /></span>
<span style="flex: 1; text-align: right">
<a href="/stats" style="
color: var(--text-secondary);
font-size: 0.8em;
text-decoration: none;
">📊 Stats</a>
</span>
</footer>
{{FOOTER}}
</body>
</html>

16
data/html/footer.html Normal file
View File

@@ -0,0 +1,16 @@
<footer class="powered-by" style="display: flex; align-items: center">
<span style="flex: 1; text-align: left">
<span style="
color: var(--text-secondary);
font-size: 0.8em;
">{{VERSION}}</span>
</span>
<span>Powered by: <img src="/logo.png" alt="ICSBox" class="footer-logo" /></span>
<span style="flex: 1; text-align: right">
<a href="/stats" style="
color: var(--text-secondary);
font-size: 0.8em;
text-decoration: none;
">📊 Stats</a>
</span>
</footer>

View File

@@ -15,7 +15,7 @@
</head>
<body>
<h1>Black Hole Share</h1>
<h1><a href="/" class="home-link">Black Hole Share</a></h1>
<div class="upload-container">
<div class="upload-area">
@@ -25,6 +25,7 @@
</div>
</div>
</div>
<div id="uploadError" class="upload-error" style="display: none" role="status" aria-live="polite"></div>
<div class="duration-container">
<label for="durationSlider">Duration: <span id="durationValue">5</span> min</label>
@@ -44,23 +45,14 @@
</div>
</div>
<footer class="powered-by" style="display: flex; align-items: center">
<span style="flex: 1"></span>
<span>Powered by: <img src="logo.png" alt="ICSBox" class="footer-logo" /></span>
<span style="flex: 1; text-align: right">
<a href="/stats" style="
color: var(--text-secondary);
font-size: 0.8em;
text-decoration: none;
">📊 Stats</a>
</span>
</footer>
{{FOOTER}}
<!-- Zoom overlay -->
<div id="zoomOverlay" class="zoom-overlay" style="display: none"></div>
<script>
let currentContentData = null;
let uploadCompleted = false;
const fileInput = document.getElementById("fileInput");
const uploadZone = document.getElementById("uploadZone");
const uploadContainer = document.querySelector(".upload-container");
@@ -72,6 +64,17 @@
const linkContainer = document.getElementById("linkContainer");
const uploadedLink = document.getElementById("uploadedLink");
const clipboardMessage = document.getElementById("clipboardMessage");
const uploadError = document.getElementById("uploadError");
// Render a retry delay (in seconds) as a compact human-readable string,
// e.g. 125 -> "2m 5s", 42 -> "42s". Negative or fractional input is
// clamped to a non-negative whole number of seconds first.
function formatRetryAfter(seconds) {
const total = Math.max(0, Math.floor(seconds));
const mins = Math.floor(total / 60);
const secs = total % 60;
return mins > 0 ? `${mins}m ${secs}s` : `${secs}s`;
}
// Update duration display
durationSlider.addEventListener("input", function () {
@@ -115,6 +118,8 @@
};
try {
uploadError.style.display = "none";
uploadError.textContent = "";
const response = await fetch("/api/upload", {
method: "POST",
headers: {
@@ -123,10 +128,36 @@
body: JSON.stringify(payload),
});
const result = await response.json();
console.log(
`✅ Upload received!\n${JSON.stringify(result, null, 2)}`
);
let result = null;
try {
result = await response.json();
} catch (parseError) {
result = null;
}
if (!response.ok) {
const retryAfterSeconds = result && Number.isFinite(Number(result.retry_after_seconds))
? Number(result.retry_after_seconds)
: null;
let errorMessage =
(result && result.error) ||
`Upload failed (${response.status})`;
if (retryAfterSeconds !== null) {
errorMessage += ` Try again in ${formatRetryAfter(retryAfterSeconds)}.`;
}
uploadError.textContent = errorMessage;
uploadError.style.display = "block";
return;
}
if (!result || !result.link) {
uploadError.textContent = "Upload failed (invalid response)";
uploadError.style.display = "block";
return;
}
// Mark upload as completed to prevent further pastes
uploadCompleted = true;
// Hide duration controls and buttons
document.querySelector('label[for="durationSlider"]').style.display =
@@ -175,13 +206,15 @@
};
}
} catch (error) {
console.log(`❌ Error: ${error.message}`);
uploadError.textContent = `Upload failed (${error.message})`;
uploadError.style.display = "block";
}
}
// Reset to initial state
resetBtn.addEventListener("click", function () {
currentContentData = null;
uploadCompleted = false;
uploadZone.innerHTML =
"<p>Click to select file, paste image data, or drag & drop</p>";
uploadContainer.style.height = "180px";
@@ -201,6 +234,8 @@
resetBtn.style.display = "none";
linkContainer.style.display = "none";
clipboardMessage.style.display = "none";
uploadError.style.display = "none";
uploadError.textContent = "";
uploadZone.focus();
});
@@ -262,6 +297,7 @@
// Open file picker on container click (ONLY IF EMPTY)
uploadContainer.addEventListener("click", function (e) {
if (
!uploadCompleted &&
uploadContainer.style.pointerEvents !== "none" &&
!uploadZone.querySelector(".text-content") &&
!uploadZone.querySelector("img")
@@ -271,6 +307,7 @@
});
fileInput.addEventListener("change", function (e) {
if (uploadCompleted) return;
const file = e.target.files[0];
if (file) {
const reader = new FileReader();
@@ -283,6 +320,10 @@
// Handle paste from clipboard
uploadZone.addEventListener("paste", function (e) {
if (uploadCompleted) {
e.preventDefault();
return;
}
e.preventDefault();
const items = e.clipboardData.items;
@@ -310,6 +351,7 @@
function handleDrop(e) {
e.preventDefault();
if (uploadCompleted) return;
const file = e.dataTransfer.files[0];
if (file && file.type.startsWith("image/")) {
const reader = new FileReader();
@@ -368,6 +410,26 @@
}
});
// True when pressing Enter should trigger the upload button: content has
// been staged (currentContentData set), the upload button is actually
// rendered (computed display, not just the inline style), and the image
// zoom overlay is not open.
function canTriggerUpload() {
return (
currentContentData &&
window.getComputedStyle(uploadBtn).display !== "none" &&
zoomOverlay.style.display !== "flex"
);
}
// ENTER TO UPLOAD (when content is ready)
document.addEventListener(
"keydown",
function (e) {
if ((e.key === "Enter" || e.code === "NumpadEnter") && canTriggerUpload()) {
e.preventDefault();
uploadBtn.click();
}
},
true
);
window.addEventListener("resize", function () {
if (currentContentData) {
displayContent(currentContentData);
@@ -376,4 +438,4 @@
</script>
</body>
</html>
</html>

View File

@@ -82,7 +82,7 @@
}
.activity-list {
max-height: 300px;
max-height: 260px;
overflow-y: auto;
font-family: "JetBrains Mono", monospace;
font-size: 0.85em;
@@ -172,17 +172,7 @@
<p>Loading statistics...</p>
</div>
<footer class="powered-by" style="display: flex; align-items: center">
<span style="flex: 1"></span>
<span>Powered by: <img src="/logo.png" alt="ICSBox" class="footer-logo" /></span>
<span style="flex: 1; text-align: right">
<a href="/stats" style="
color: var(--text-secondary);
font-size: 0.8em;
text-decoration: none;
">📊 Stats</a>
</span>
</footer>
{{FOOTER}}
<script>
async function loadStats() {

View File

@@ -29,7 +29,7 @@ body {
height: 100vh;
margin: 0 auto;
padding: 20px;
padding-bottom: 140px;
padding-bottom: 80px;
background-color: var(--bg-tertiary);
background-image:
radial-gradient(1200px 800px at 10% -20%, var(--bg-glow), transparent 60%),
@@ -91,6 +91,18 @@ h1 .home-link:hover {
transition: all 0.3s ease;
}
.upload-error {
margin: 12px 0 0 0;
padding: 10px 12px;
border: 1px solid #ff6666;
border-radius: 10px;
background-color: rgba(255, 102, 102, 0.12);
color: #ff6666;
font-size: 0.9em;
text-align: center;
box-shadow: 0 4px 12px rgba(255, 102, 102, 0.15);
}
.duration-container .button-row {
display: flex;
flex-direction: row;
@@ -453,7 +465,7 @@ h1 .home-link:hover {
/* View page styles */
body.view-page {
width: 860px;
padding-bottom: 140px;
padding-bottom: 80px;
}
.view-container {
@@ -476,6 +488,8 @@ body.view-page {
border-top: 1px solid var(--border-color);
font-size: 0.9em;
color: var(--text-secondary);
width: 100%;
z-index: 10;
}
.powered-by .home-link {

0
data/html/test/test.txt Normal file
View File

View File

@@ -19,17 +19,7 @@
</div>
</div>
<footer class="powered-by" style="display: flex; align-items: center">
<span style="flex: 1"></span>
<span>Powered by: <img src="/logo.png" alt="ICSBox" class="footer-logo" /></span>
<span style="flex: 1; text-align: right">
<a href="/stats" style="
color: var(--text-secondary);
font-size: 0.8em;
text-decoration: none;
">📊 Stats</a>
</span>
</footer>
{{FOOTER}}
<!-- Zoom overlay -->
<div id="zoomOverlay" class="zoom-overlay" style="display: none;"></div>

View File

@@ -1,2 +0,0 @@
[toolchain]
channel = "1.90.0"

View File

@@ -1,13 +1,15 @@
use actix_web::{HttpRequest, HttpResponse, get, post, web};
use base64::{Engine, engine::general_purpose};
use chrono::Utc;
use serde::Deserialize;
use serde_json::json;
use crate::{MAX_ASSET_DURATION, MIN_ASSET_DURATION};
use crate::{
LOG_FILE_NAME,
data_mgt::{Asset, AssetTracker},
logs::{LogEvent, LogEventType, log_event},
MAX_ASSET_SIZE_BYTES,
data_mgt::{AppState, Asset},
logs::LogEventType,
};
#[derive(Deserialize, Debug)]
@@ -21,51 +23,103 @@ pub struct UploadRequest {
async fn api_upload(
req: HttpRequest,
body: web::Json<UploadRequest>,
assets: web::Data<AssetTracker>,
app_state: web::Data<AppState>,
) -> Result<HttpResponse, actix_web::Error> {
// Check for rate limiting
let now = Utc::now().timestamp_millis();
let connection_info = req.connection_info();
let uploader_ip = connection_info
.realip_remote_addr()
.map(|s| s.to_string())
.or_else(|| connection_info.peer_addr().map(|value| value.to_string()))
.ok_or_else(|| actix_web::error::ErrorBadRequest("Cannot determine client ip"))?;
// Convert to bytes
let content_bytes = if body.content_type == "text/plain" {
body.content.as_bytes().to_vec()
} else {
match general_purpose::STANDARD.decode(&body.content) {
Ok(bytes) => bytes,
Err(_) => return Ok(HttpResponse::BadRequest().body("Invalid base64 payload")),
Err(_) => return Ok(HttpResponse::BadRequest().json(json!({ "error": "Invalid base64 content" }))),
}
};
let connection_info = req.connection_info();
let uploader_ip = connection_info
.realip_remote_addr()
.or_else(|| connection_info.peer_addr())
.unwrap_or("-")
.to_string();
if content_bytes.len() > MAX_ASSET_SIZE_BYTES {
let error = json!({"error": "Asset too large"});
app_state
.log_file
.write_event(LogEventType::Error(error.clone()))
.await
.unwrap_or_else(|e| println!("Failed to log HTTP request: {}", e));
return Ok(HttpResponse::PayloadTooLarge().json(json!({
"error": "Asset too large"
})));
}
let clamped_duration = body.duration.clamp(MIN_ASSET_DURATION, MAX_ASSET_DURATION);
let asset_expiration_time = now + (clamped_duration as i64 * 60 * 1000);
let (allowed, retry_after_ms) = app_state
.connection_tracker
.check(&uploader_ip, asset_expiration_time)
.await;
if !allowed {
let retry_after_seconds = retry_after_ms.map(|ms| ((ms + 999) / 1000).max(1));
let response_body = match retry_after_seconds {
Some(seconds) => json!({ "error": "Upload limit exceeded", "retry_after_seconds": seconds }),
None => json!({ "error": "Upload limit exceeded" }),
};
// return Ok(HttpResponse::TooManyRequests().body("Upload limit exceeded"));
return Ok(HttpResponse::TooManyRequests().json(response_body));
}
let asset = crate::data_mgt::Asset::new(
body.duration,
clamped_duration,
body.content_type.clone(),
content_bytes,
Some(uploader_ip.clone()),
);
let id = asset.id();
log_event(LogEventType::AssetUploaded(asset.to_value()));
assets.add_asset(asset).await;
let response_body = json!({ "link": format!("/bhs/{}", id) });
Ok(HttpResponse::Ok().json(response_body))
match app_state.assets.add_asset(asset.clone(), &app_state.log_file).await {
Ok(_) => {
app_state
.log_file
.write_event(LogEventType::AssetUploaded(asset.to_value()))
.await
.unwrap_or_else(|e| println!("Failed to log HTTP request: {}", e));
let response_body = json!({ "link": format!("/bhs/{}", id) });
Ok(HttpResponse::Ok().json(response_body))
}
Err(e) => {
let error = json!({"error": format!("Failed to store asset: {}", e)});
app_state
.log_file
.write_event(LogEventType::Error(error.clone()))
.await
.unwrap_or_else(|e| println!("Failed to log HTTP request: {}", e));
Ok(HttpResponse::InternalServerError().json(error))
}
}
}
#[get("/api/content/{id}")]
async fn api_get_asset(
req: HttpRequest,
path: web::Path<String>,
assets: web::Data<AssetTracker>,
app_state: web::Data<AppState>,
) -> Result<HttpResponse, actix_web::Error> {
log_event(LogEventType::HttpRequest(req.into()));
match assets.get_asset(&path.into_inner()).await {
app_state
.log_file
.write_event(LogEventType::HttpRequest(req.into()))
.await
.unwrap_or_else(|e| println!("Failed to log HTTP request: {}", e));
match app_state.assets.get_asset(&path.into_inner()).await {
None => Ok(HttpResponse::NotFound().body("Asset not found")),
Some(asset) => Ok(HttpResponse::Ok()
.content_type(asset.mime())
.body(asset.content().clone())),
Some(asset) => Ok(HttpResponse::Ok().content_type(asset.mime()).body(asset.content())),
}
}
@@ -90,47 +144,40 @@ struct ActivityItem {
}
#[get("/api/stats")]
async fn api_stats(assets: web::Data<AssetTracker>) -> Result<HttpResponse, actix_web::Error> {
use crate::LOG_DIR;
use std::fs;
let (active_assets, storage_bytes, image_count, text_count) = assets.stats_summary().await;
async fn api_stats(app_state: web::Data<AppState>) -> Result<HttpResponse, actix_web::Error> {
let (active_assets, storage_bytes, image_count, text_count) = app_state.assets.stats_summary().await;
let mut total_uploads = 0;
let mut total_deleted = 0;
let mut recent_activity: Vec<ActivityItem> = Vec::new();
let mut request_count: usize = 0;
let log_path = format!("{}{}", LOG_DIR, LOG_FILE_NAME);
if let Ok(content) = fs::read_to_string(&log_path) {
for line in content.lines() {
if let Ok(entry) = serde_json::from_str::<LogEvent>(line) {
match entry.event {
LogEventType::HttpRequest(_req) => {
request_count += 1;
}
LogEventType::AssetUploaded(asset) => {
let asset = serde_json::from_value::<Asset>(asset).unwrap_or_default();
total_uploads += 1;
recent_activity.push(ActivityItem {
action: "upload".to_string(),
mime: asset.mime(),
share_duration: asset.share_duration(),
timestamp: entry.time,
});
}
LogEventType::AssetDeleted(asset) => {
let asset = serde_json::from_value::<Asset>(asset).unwrap_or_default();
total_deleted += 1;
recent_activity.push(ActivityItem {
action: "delete".to_string(),
mime: asset.mime(),
share_duration: asset.share_duration(),
timestamp: entry.time,
});
}
}
let log_events = app_state.log_file.read_events().await.unwrap_or_default();
for line in log_events {
match line.event {
LogEventType::HttpRequest(_req) => {
request_count += 1;
}
LogEventType::AssetUploaded(asset) => {
let asset = serde_json::from_value::<Asset>(asset).unwrap_or_default();
total_uploads += 1;
recent_activity.push(ActivityItem {
action: "upload".to_string(),
mime: asset.mime(),
share_duration: asset.share_duration(),
timestamp: line.time,
});
}
LogEventType::AssetDeleted(asset) => {
let asset = serde_json::from_value::<Asset>(asset).unwrap_or_default();
total_deleted += 1;
recent_activity.push(ActivityItem {
action: "delete".to_string(),
mime: asset.mime(),
share_duration: asset.share_duration(),
timestamp: line.time,
});
}
LogEventType::Error(_event) => {}
}
}

View File

@@ -1,5 +1,5 @@
use std::fmt::Debug;
use std::sync::Arc;
use std::{collections::HashMap, fmt::Debug};
use anyhow::Result;
use chrono::{Duration, Utc};
@@ -7,7 +7,9 @@ use futures::lock::Mutex;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use crate::logs::{LogEventType, log_event};
use crate::MAX_ASSETS;
use crate::logs::LogFile;
use crate::{MAX_UPLOADS_PER_USER, logs::LogEventType};
#[derive(Debug, Serialize, Deserialize, Clone, Default)]
pub struct Asset {
@@ -85,41 +87,66 @@ impl Asset {
pub fn to_value(&self) -> Value {
serde_json::to_value(self).unwrap_or(Value::Null)
}
// pub fn save(&self) -> Result<String> {
// let id = self.id.clone();
// let path = format!("{}{}", DATA_STORAGE, self.id);
// std::fs::create_dir_all(DATA_STORAGE)?;
// std::fs::write(&path, self.to_bytes()?)?;
// Ok(id)
// }
}
#[derive(Clone)]
pub struct AssetTracker {
#[derive(Clone, Debug)]
pub struct AppState {
pub assets: AssetStorage,
pub connection_tracker: RateLimiter,
pub log_file: LogFile,
}
#[derive(Clone, Debug, Default)]
pub struct AssetStorage {
assets: Arc<Mutex<Vec<Asset>>>,
}
#[allow(dead_code)]
impl AssetTracker {
impl AssetStorage {
pub fn new() -> Self {
AssetTracker {
assets: Arc::new(Mutex::new(Vec::new())),
Self {
assets: Arc::new(Mutex::new(Vec::with_capacity(MAX_ASSETS))),
}
}
pub async fn add_asset(&self, asset: Asset) {
print!("[{}] Adding asset: {}", chrono::Local::now().to_rfc3339(), asset.id());
self.assets.lock().await.push(asset);
self.show_assets().await;
/// Adds `asset` to storage, first evicting any expired assets.
///
/// Returns an error (without inserting) when the store is still at
/// `MAX_ASSETS` capacity after eviction. `AssetDeleted` events for evicted
/// assets are written to `log` in every case — the previous version's early
/// `return Err` dropped those events silently when the store was full.
pub async fn add_asset(&self, asset: Asset, log: &LogFile) -> Result<()> {
    let now = chrono::Utc::now();
    let mut removed: Vec<Asset> = Vec::new();
    // Evict, capacity-check, and insert under a single lock so concurrent
    // callers cannot race past the MAX_ASSETS limit.
    let insert_result = {
        let mut assets = self.assets.lock().await;
        removed.extend(assets.extract_if(.., |a| a.is_expired()));
        if assets.len() >= MAX_ASSETS {
            Err(anyhow::anyhow!("Asset storage full"))
        } else {
            println!("[{}] Adding asset: {}", now.to_rfc3339(), asset.id());
            assets.push(asset);
            Ok(())
        }
    };
    // Lock is released here; log evictions regardless of insert outcome.
    for asset in removed {
        println!("[{}] Removing asset: {}", now.to_rfc3339(), asset.id());
        log.write_event(LogEventType::AssetDeleted(asset.to_value())).await?;
    }
    insert_result
}
pub async fn remove_expired(&self) {
/// Appends `asset` directly, with no expiry eviction or capacity check.
async fn push_asset(&self, asset: Asset) {
    self.assets.lock().await.push(asset);
}
pub async fn remove_expired(&self, log: &LogFile) {
let mut assets = self.assets.lock().await;
let removed_assets = assets.extract_if(.., |asset| asset.is_expired());
for asset in removed_assets {
println!("[{}] Removing asset: {}", chrono::Local::now().to_rfc3339(), asset.id());
log_event(LogEventType::AssetDeleted(asset.to_value()));
log.write_event(LogEventType::AssetDeleted(asset.to_value()))
.await
.unwrap();
}
}
@@ -153,7 +180,8 @@ impl AssetTracker {
pub async fn show_assets(&self) {
for asset in self.assets.lock().await.iter() {
println!(
"Asset ID: {}, Expires At: {}, MIME: {}, Size: {} bytes",
"[{}] Asset ID: {}, Expires At: {}, Mime: {}, Size: {} bytes",
chrono::Local::now().to_rfc3339(),
asset.id(),
asset.expires_at(),
asset.mime(),
@@ -166,14 +194,57 @@ impl AssetTracker {
let assets = self.assets.lock().await;
for asset in assets.iter().cloned() {
if asset.id() == id {
return Some(asset.clone());
return Some(asset);
}
}
None
}
/// Current number of stored assets (expired ones included until eviction).
pub async fn assets_len(&self) -> usize {
    let assets = self.assets.lock().await;
    assets.len()
}
}
pub async fn clear_assets(assets: AssetTracker) -> Result<()> {
assets.remove_expired().await;
/// Per-client upload rate limiter keyed by IP address.
///
/// Each client maps to the expiry timestamps (Unix millis) of its currently
/// live uploads; an upload is admitted only while fewer than
/// `MAX_UPLOADS_PER_USER` are live.
#[derive(Clone, Debug, Default)]
pub struct RateLimiter {
    // IP -> expiry timestamps (millis) of that client's active uploads.
    pub clients: Arc<Mutex<HashMap<String, Vec<i64>>>>,
}

impl RateLimiter {
    /// Checks whether `client_ip` may upload an asset expiring at
    /// `asset_exp_time` (Unix millis).
    ///
    /// On success records the expiry and returns `(true, None)`. On rejection
    /// returns `(false, Some(retry_after_ms))`, where `retry_after_ms` is the
    /// time until the client's earliest upload expires (clamped to >= 1 ms).
    pub async fn check(&self, client_ip: &str, asset_exp_time: i64) -> (bool, Option<i64>) {
        self.clear_expired().await;
        let now = Utc::now().timestamp_millis();
        let mut clients = self.clients.lock().await;
        let entry = clients.entry(client_ip.to_string()).or_default();
        if entry.len() < MAX_UPLOADS_PER_USER {
            entry.push(asset_exp_time);
            (true, None)
        } else {
            println!(
                "[{}] Rate limit exceeded for IP: {}",
                chrono::Local::now().to_rfc3339(),
                client_ip
            );
            let first_to_expire = entry
                .iter()
                .min()
                .copied()
                .expect("rejection branch implies a non-empty entry");
            (false, Some((first_to_expire - now).max(1)))
        }
    }

    /// Drops expired timestamps, and removes clients with no live uploads so
    /// the map cannot grow without bound across many distinct IPs (the
    /// previous version left empty entries behind forever).
    pub async fn clear_expired(&self) {
        let mut clients = self.clients.lock().await;
        let now = Utc::now().timestamp_millis();
        clients.retain(|_, timestamps| {
            timestamps.retain(|&timestamp| timestamp > now);
            !timestamps.is_empty()
        });
    }
}
pub async fn clear_app_data(app_state: &AppState) -> Result<()> {
app_state.assets.remove_expired(&app_state.log_file).await;
app_state.connection_tracker.clear_expired().await;
Ok(())
}

View File

@@ -1,10 +1,73 @@
use std::{fs::OpenOptions, io::Write};
use anyhow::Result;
use std::{path::PathBuf, sync::Arc};
use actix_web::HttpRequest;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tokio::{
fs::{File, OpenOptions, rename},
io::{AsyncReadExt, AsyncWriteExt},
sync::Mutex,
};
use crate::{LOG_DIR, LOG_FILE_NAME};
/// Append-only structured (JSON-lines) log file with startup rotation.
#[derive(Debug, Clone)]
pub struct LogFile {
    // Field is read by `read_events`, so no underscore prefix.
    path: PathBuf,
    // Shared handle: clones of `LogFile` append through the same file, and
    // the mutex keeps concurrent lines from interleaving.
    handle: Arc<Mutex<File>>,
}

impl LogFile {
    /// Opens (creating if needed) the log at `path`, first rotating any
    /// existing file aside under a timestamped name.
    pub async fn new(path: impl Into<PathBuf>) -> Result<Self> {
        let path = path.into();
        if LogFile::log_file_exist(&path).await? {
            LogFile::log_file_rotate(&path).await?;
        }
        let handle = OpenOptions::new().create(true).append(true).open(&path).await?;
        println!("Log file created at: {}", path.display());
        Ok(Self {
            path,
            handle: Arc::new(Mutex::new(handle)),
        })
    }

    /// Reads and parses every event line from the log. Malformed lines are
    /// reported to stdout and skipped rather than failing the whole read.
    pub async fn read_events(&self) -> Result<Vec<LogEvent>> {
        let mut file = File::open(&self.path).await?;
        let mut contents = String::new();
        file.read_to_string(&mut contents).await?;
        let mut events: Vec<LogEvent> = Vec::new();
        for line in contents.lines() {
            match serde_json::from_str::<LogEvent>(line) {
                Ok(event) => events.push(event),
                Err(e) => println!("Failed to parse log line: {}: {}", e, line),
            }
        }
        Ok(events)
    }

    /// Serializes `event` as one JSON line and appends it to the log.
    pub async fn write_event(&self, event: LogEventType) -> Result<()> {
        let log_event: LogEvent = event.into();
        let line = serde_json::to_string(&log_event)? + "\n";
        let mut handle = self.handle.lock().await;
        handle.write_all(line.as_bytes()).await?;
        // tokio::fs::File buffers writes; flush so `read_events` (which opens
        // the path with a separate handle) observes the event immediately.
        handle.flush().await?;
        Ok(())
    }

    /// Whether something already exists at `path`.
    async fn log_file_exist(path: impl Into<PathBuf>) -> Result<bool> {
        Ok(tokio::fs::metadata(path.into()).await.is_ok())
    }

    /// Renames the current log file to `<dir>/<timestamp>_<name>`.
    async fn log_file_rotate(path: impl Into<PathBuf>) -> Result<()> {
        let path: PathBuf = path.into();
        // Was "%Y_%m_%_d-%H%M%S": chrono's `%_d` is the *space-padded* day,
        // which produced rotated filenames containing a space. Use `%d`.
        let now = chrono::Utc::now().format("%Y_%m_%d-%H%M%S").to_string();
        let Some(dir) = path.parent() else {
            return Err(anyhow::anyhow!("Failed to get parent directory for log rotation"));
        };
        let filename = path.file_name().unwrap_or_else(|| std::ffi::OsStr::new("log.txt"));
        let rotated = dir.join(format!("{}_{}", now, filename.to_string_lossy()));
        rename(path, rotated).await?;
        Ok(())
    }
}
#[derive(Debug, Serialize, Deserialize)]
pub struct LogHttpRequest {
@@ -52,6 +115,7 @@ pub enum LogEventType {
AssetUploaded(Value),
AssetDeleted(Value),
HttpRequest(LogHttpRequest),
Error(Value),
}
#[derive(Debug, Serialize, Deserialize)]
@@ -66,17 +130,3 @@ impl From<LogEventType> for LogEvent {
LogEvent { time, event }
}
}
/// Legacy synchronous logger: appends `event` as one JSON line to the global
/// log path. Best-effort — failures are printed to stderr, never propagated.
pub fn log_event(event: LogEventType) {
    let log_path = LOG_DIR.to_string() + LOG_FILE_NAME;
    // Open in append mode, creating the file on first use.
    let Ok(mut file) = OpenOptions::new().create(true).append(true).open(log_path) else {
        eprintln!("failed to open log file for asset event");
        return;
    };
    let log_event: LogEvent = event.into();
    // If serialization fails, the error text itself becomes the logged line.
    let line = serde_json::to_string(&log_event).unwrap_or_else(|e| e.to_string());
    let _ = writeln!(file, "{}", line);
}

View File

@@ -1,19 +1,39 @@
mod api;
mod data_mgt;
mod logs;
use actix_files::NamedFile;
#[cfg(test)]
mod tests;
use actix_web::{
App, HttpRequest, HttpServer, get, route,
App, HttpRequest, HttpResponse, HttpServer, get, mime, route,
web::{self},
};
use anyhow::Result;
use mime_guess::from_path;
use serde_json::Value;
use std::{env, fs, path::PathBuf, sync::LazyLock};
use std::{
env, fs,
path::{Path, PathBuf},
sync::LazyLock,
};
// Directory containing the HTML templates served to clients.
pub static HTML_DIR: &str = "data/html/";
// Directory holding the active and rotated log files.
pub static LOG_DIR: &str = "data/logs/";
// Name of the active log file inside LOG_DIR.
pub static LOG_FILE_NAME: &str = "log.txt";
pub static MIN_ASSET_DURATION: u32 = 1; // in minutes
pub static MAX_ASSET_DURATION: u32 = 60; // in minutes
// Maximum number of assets held in memory at once.
pub static MAX_ASSETS: usize = 1000;
pub static MAX_ASSET_SIZE_BYTES: usize = 3 * 1024 * 1024; // 3 MB
// Maximum concurrent (unexpired) uploads allowed per client IP.
pub static MAX_UPLOADS_PER_USER: usize = 10;
// Shared footer markup injected into every page; empty if footer.html is missing.
pub static FOOTER_HTML: LazyLock<String> =
    LazyLock::new(|| fs::read_to_string(Path::new(HTML_DIR).join("footer.html")).unwrap_or_default());
// Placeholder -> replacement pairs substituted into every served HTML page.
pub static HTML_VARS: LazyLock<Vec<(&str, &str)>> = LazyLock::new(|| {
    vec![
        ("{{FOOTER}}", (*FOOTER_HTML).as_str()),
        ("{{VERSION}}", env!("CARGO_PKG_VERSION")),
    ]
});
pub static BIND_ADDR: LazyLock<String> = LazyLock::new(|| match env::var("BIND_ADDR") {
Ok(addr) => {
@@ -45,87 +65,96 @@ pub static STATIC_PAGES: LazyLock<Vec<String>> = LazyLock::new(|| {
use crate::{
api::{api_get_asset, api_stats, api_upload},
logs::{LogEventType, log_event},
data_mgt::AppState,
logs::{LogEventType, LogFile},
};
#[get("/")]
async fn index(req: HttpRequest) -> actix_web::Result<NamedFile> {
async fn index(req: HttpRequest, app_state: web::Data<AppState>) -> actix_web::Result<HttpResponse> {
let path: PathBuf = PathBuf::from(HTML_DIR.to_string() + "index.html");
log_event(LogEventType::HttpRequest(req.into()));
Ok(NamedFile::open(path)?)
app_state
.log_file
.write_event(LogEventType::HttpRequest(req.into()))
.await
.unwrap_or_else(|e| println!("Failed to log HTTP request: {}", e));
get_static_file(path).await
}
#[get("/stats")]
async fn stats(req: HttpRequest) -> actix_web::Result<NamedFile> {
async fn stats(req: HttpRequest, app_state: web::Data<AppState>) -> actix_web::Result<HttpResponse> {
let path: PathBuf = PathBuf::from(HTML_DIR.to_string() + "stats.html");
log_event(LogEventType::HttpRequest(req.into()));
Ok(NamedFile::open(path)?)
app_state
.log_file
.write_event(LogEventType::HttpRequest(req.into()))
.await
.unwrap_or_else(|e| println!("Failed to log HTTP request: {}", e));
get_static_file(path).await
}
#[get("/bhs/{id}")]
async fn view_asset(req: HttpRequest) -> actix_web::Result<NamedFile> {
async fn view_asset(req: HttpRequest, app_state: web::Data<AppState>) -> actix_web::Result<HttpResponse> {
let path: PathBuf = PathBuf::from(HTML_DIR.to_string() + "view.html");
log_event(LogEventType::HttpRequest(req.into()));
Ok(NamedFile::open(path)?)
app_state
.log_file
.write_event(LogEventType::HttpRequest(req.into()))
.await
.unwrap_or_else(|e| println!("Failed to log HTTP request: {}", e));
get_static_file(path).await
}
#[route("/{tail:.*}", method = "GET", method = "POST")]
async fn catch_all(req: HttpRequest, _payload: Option<web::Json<Value>>) -> actix_web::Result<NamedFile> {
async fn catch_all(
req: HttpRequest,
_payload: Option<web::Json<Value>>,
app_state: web::Data<AppState>,
) -> actix_web::Result<HttpResponse> {
let response = match req.uri().path() {
path if STATIC_PAGES.contains(&path[1..].into()) => {
let file_path = HTML_DIR.to_string() + path;
Ok(NamedFile::open(file_path)?)
get_static_file(file_path).await
}
_ => {
let file_path = PathBuf::from(HTML_DIR.to_string() + "error.html");
Ok(NamedFile::open(file_path)?)
get_static_file(file_path).await
}
};
log_event(LogEventType::HttpRequest(req.into()));
app_state
.log_file
.write_event(LogEventType::HttpRequest(req.into()))
.await
.unwrap_or_else(|e| println!("Failed to log HTTP request: {}", e));
response
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
let _ = fs::create_dir_all(LOG_DIR);
let log_filename = format!("{}{}", LOG_DIR, LOG_FILE_NAME);
let log_filename_path = std::path::Path::new(&log_filename);
#[tokio::main]
async fn main() -> Result<()> {
let log_file = LogFile::new(format!("{}{}", LOG_DIR, LOG_FILE_NAME))
.await
.expect("Failed to create or open log file");
let time_tag = chrono::Local::now().format("%Y_%m_%d_%H_%M_%S");
if log_filename_path.exists() {
println!("File: {}, exists, rotating.", &log_filename_path.display());
fs::rename(
&log_filename_path,
format!("{}{}_{}", LOG_DIR, time_tag, &LOG_FILE_NAME),
)
.unwrap_or_else(|e| {
println!(
"No existing log file {} to rotate. Error: {}",
log_filename_path.to_string_lossy(),
e
)
});
println!("Rotated log file to: {}_{}", time_tag, &LOG_FILE_NAME);
}
let assets = data_mgt::AssetTracker::new();
let app_state = data_mgt::AppState {
assets: data_mgt::AssetStorage::new(),
connection_tracker: data_mgt::RateLimiter::default(),
log_file,
};
println!("Starting server at http://{}:{}/", *BIND_ADDR, *BIND_PORT);
let assets_clone = assets.clone();
let inner_app_state = app_state.clone();
tokio::spawn(async move {
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(1));
loop {
interval.tick().await;
if let Err(e) = data_mgt::clear_assets(assets_clone.clone()).await {
if let Err(e) = data_mgt::clear_app_data(&inner_app_state).await {
eprintln!("Error clearing assets: {}", e);
}
}
});
HttpServer::new(move || {
App::new()
.app_data(web::JsonConfig::default().limit(1024 * 1024 * 3))
.app_data(web::Data::new(assets.clone()))
.app_data(web::JsonConfig::default().limit(1024 * 1024 * 3)) // 3MB limit
.app_data(web::Data::new(app_state.clone()))
.service(index)
.service(stats)
.service(view_asset)
@@ -136,5 +165,30 @@ async fn main() -> std::io::Result<()> {
})
.bind((BIND_ADDR.clone(), *BIND_PORT))?
.run()
.await
.await?;
Ok(())
}
/// Serves a file from disk as an HTTP 200 response.
///
/// HTML files are read as text and have every `HTML_VARS` placeholder
/// substituted before being returned; all other files are sent verbatim under
/// their guessed MIME type. Any read failure maps to a 500 error.
pub async fn get_static_file<P: AsRef<Path>>(path: P) -> actix_web::Result<HttpResponse> {
    let path = path.as_ref();
    let mime = from_path(path).first_or_octet_stream();
    let is_html = mime.type_() == mime::TEXT && mime.subtype() == mime::HTML;

    if !is_html {
        // Non-HTML: raw bytes, no templating.
        let bytes = tokio::fs::read(path)
            .await
            .map_err(actix_web::error::ErrorInternalServerError)?;
        return Ok(HttpResponse::Ok().content_type(mime.as_ref()).body(bytes));
    }

    let raw = tokio::fs::read_to_string(path)
        .await
        .map_err(actix_web::error::ErrorInternalServerError)?;
    // Expand template variables ({{FOOTER}}, {{VERSION}}, ...).
    let html = HTML_VARS.iter().fold(raw, |acc, (k, v)| acc.replace(k, v));
    Ok(HttpResponse::Ok().content_type("text/html; charset=utf-8").body(html))
}

1
src/tests.rs Normal file
View File

@@ -0,0 +1 @@