Add chunked uploads (Resumable currently broken)

This commit is contained in:
2026-04-14 16:39:43 +02:00
parent 8ae5dfc483
commit 6065b4d95f
8 changed files with 428 additions and 84 deletions

View File

@@ -1,3 +1,4 @@
uploads/**
data/**
logs/**
tmp/**

1
.gitignore vendored
View File

@@ -2,4 +2,5 @@
data/**
uploads/**
tmp/**
.env

View File

@@ -39,6 +39,13 @@ func main() {
return
}
// create temp folder
path := "./tmp"
if err := os.MkdirAll(path, os.ModePerm); err != nil {
fmt.Printf("Error creating temp folder: %v\n", err)
return
}
r := gin.Default()
r.MaxMultipartMemory = 10 << 30

View File

@@ -114,6 +114,13 @@ func RateLimitByIPDynamic(maxFn func() int, per time.Duration, burstFn func() in
}
return func(c *gin.Context) {
// Kinda a shitty fix
if c.FullPath() == "/api/files/upload/chunk" || c.FullPath() == "/api/files/upload/init" || c.FullPath() == "/api/files/upload/complete" {
c.Next()
return
}
now := time.Now()
cleanup(now)

View File

@@ -5,8 +5,10 @@ import (
"ResendIt/internal/notify"
"ResendIt/internal/util"
"fmt"
"io"
"log"
"net/http"
"os"
"path/filepath"
"strconv"
"time"
@@ -238,3 +240,161 @@ func (h *Handler) Export(c *gin.Context) {
c.JSON(http.StatusOK, records)
}
// Chunked stuff
// UploadInit starts a chunked-upload session. It validates the request,
// allocates a random 32-character session ID and creates a per-session
// temp directory under ./tmp. The client must send this ID with every
// subsequent chunk.
func (h *Handler) UploadInit(c *gin.Context) {
	var req struct {
		Filename    string `json:"filename"`
		TotalChunks int    `json:"totalChunks"`
		Size        int64  `json:"size"`
	}
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request"})
		return
	}
	// Reject obviously bogus sessions up front instead of failing later
	// in UploadChunk/UploadComplete.
	if req.Filename == "" || req.TotalChunks <= 0 || req.Size <= 0 {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid upload parameters"})
		return
	}

	fileID := util.RandomString(32)

	// One directory per session keeps chunks from different uploads isolated.
	path := filepath.Join("tmp", fileID)
	if err := os.MkdirAll(path, 0o755); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to create temp dir"})
		return
	}

	c.JSON(http.StatusOK, gin.H{
		"fileId": fileID,
	})
}
// UploadChunk stores one chunk of an in-progress upload. The session ID
// and chunk index arrive as the "fileId" and "chunkIndex" request
// headers; the chunk bytes arrive as the multipart form field "chunk".
func (h *Handler) UploadChunk(c *gin.Context) {
	fileID := c.GetHeader("fileId")
	chunkIndex := c.GetHeader("chunkIndex")
	if fileID == "" || chunkIndex == "" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "missing headers"})
		return
	}
	// fileID is attacker-controlled and is joined into a filesystem path:
	// restrict it to the alphanumeric alphabet util.RandomString produces
	// so a value like "../../etc" cannot escape the tmp directory.
	for _, r := range fileID {
		if !('a' <= r && r <= 'z') && !('A' <= r && r <= 'Z') && !('0' <= r && r <= '9') {
			c.JSON(http.StatusBadRequest, gin.H{"error": "invalid fileId"})
			return
		}
	}

	idx, err := strconv.Atoi(chunkIndex)
	if err != nil || idx < 0 {
		// A negative index would write a stray "chunk_-1" file that
		// UploadComplete never assembles.
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid chunkIndex"})
		return
	}

	// Only accept chunks for sessions that UploadInit actually created.
	dir := filepath.Join("tmp", fileID)
	if info, statErr := os.Stat(dir); statErr != nil || !info.IsDir() {
		c.JSON(http.StatusNotFound, gin.H{"error": "unknown upload session"})
		return
	}

	file, err := c.FormFile("chunk")
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "missing chunk"})
		return
	}
	src, err := file.Open()
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": "cannot open chunk"})
		return
	}
	defer src.Close()

	dst, err := os.Create(filepath.Join(dir, fmt.Sprintf("chunk_%d", idx)))
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": "cannot save chunk"})
		return
	}
	defer dst.Close()

	if _, err := io.Copy(dst, src); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": "write failed"})
		return
	}
	c.JSON(http.StatusOK, gin.H{"status": "ok"})
}
// UploadComplete assembles a finished chunked upload: it streams the
// stored chunks, in index order, through an io.Pipe into the regular
// upload service, then removes the session's temp directory.
func (h *Handler) UploadComplete(c *gin.Context) {
	var req struct {
		FileID      string `json:"fileId"`
		Filename    string `json:"filename"`
		TotalChunks int    `json:"totalChunks"`
	}
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request"})
		return
	}
	if req.FileID == "" || req.TotalChunks <= 0 {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request"})
		return
	}
	// req.FileID is attacker-controlled and ends up in a path handed to
	// os.RemoveAll — restrict it to the alphanumeric alphabet
	// util.RandomString produces so it cannot traverse outside tmp.
	for _, r := range req.FileID {
		if !('a' <= r && r <= 'z') && !('A' <= r && r <= 'Z') && !('0' <= r && r <= '9') {
			c.JSON(http.StatusBadRequest, gin.H{"error": "invalid fileId"})
			return
		}
	}

	tmpDir := filepath.Join("tmp", req.FileID)

	// Stream the chunks through a pipe so the assembled file is never
	// held in memory.
	pr, pw := io.Pipe()
	// If UploadFile bails out before draining the pipe, closing the read
	// end makes the writer goroutine's next Copy fail, so it cannot leak
	// blocked on a pipe write.
	defer pr.Close()
	go func() {
		defer pw.Close()
		for i := 0; i < req.TotalChunks; i++ {
			f, err := os.Open(filepath.Join(tmpDir, fmt.Sprintf("chunk_%d", i)))
			if err != nil {
				// Propagate "missing chunk" to the reader side.
				pw.CloseWithError(err)
				return
			}
			_, err = io.Copy(pw, f)
			f.Close()
			if err != nil {
				pw.CloseWithError(err)
				return
			}
		}
	}()

	// Reuse the existing single-file upload pipeline.
	record, err := h.service.UploadFile(
		req.Filename,
		pr,
		false,
		24*time.Hour,
	)
	// Remove the session directory whether or not assembly succeeded.
	_ = os.RemoveAll(tmpDir)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusOK, gin.H{
		"id":       record.ID,
		"view_key": record.ViewID,
	})
}
// UploadStatus reports which chunk indices are already stored for an
// upload session, so an interrupted client can resume instead of
// re-sending everything.
func (h *Handler) UploadStatus(c *gin.Context) {
	fileID := c.Param("fileId")
	// Same traversal guard as the other chunk endpoints: the ID is
	// attacker-controlled and joined into a filesystem path.
	for _, r := range fileID {
		if !('a' <= r && r <= 'z') && !('A' <= r && r <= 'Z') && !('0' <= r && r <= '9') {
			c.JSON(http.StatusBadRequest, gin.H{"error": "invalid fileId"})
			return
		}
	}

	dir := filepath.Join("tmp", fileID)
	files, err := os.ReadDir(dir)
	if err != nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "not found"})
		return
	}

	// Non-nil so an empty session encodes as [] instead of JSON null.
	uploaded := make([]int, 0, len(files))
	for _, f := range files {
		var idx int
		// Only count files matching the chunk naming scheme.
		if _, err := fmt.Sscanf(f.Name(), "chunk_%d", &idx); err == nil {
			uploaded = append(uploaded, idx)
		}
	}
	c.JSON(http.StatusOK, gin.H{
		"uploadedChunks": uploaded,
	})
}

View File

@@ -16,6 +16,12 @@ func RegisterRoutes(r *gin.RouterGroup, h *Handler) {
files.GET("/view/:id", h.View)
files.GET("/delete/:del_id", h.Delete)
// Chunked upload endpoints
files.POST("/upload/init", h.UploadInit)
files.POST("/upload/chunk", h.UploadChunk)
files.POST("/upload/complete", h.UploadComplete)
files.GET("/upload/status/:fileId", h.UploadStatus)
adminRoutes := files.Group("/admin")
adminRoutes.Use(middleware.AuthMiddleware())
adminRoutes.Use(middleware.RequireRole("admin"))

View File

@@ -2,6 +2,7 @@ package util
import (
"fmt"
"math/rand"
"strings"
)
@@ -41,3 +42,12 @@ func SafeFilename(name string) string {
}
return string(out)
}
// RandomString returns a string of n characters drawn at random from
// the alphanumeric alphabet [a-zA-Z0-9].
func RandomString(n int) string {
	const alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
	var sb strings.Builder
	sb.Grow(n)
	for i := 0; i < n; i++ {
		sb.WriteByte(alphabet[rand.Intn(len(alphabet))])
	}
	return sb.String()
}

View File

@@ -4,7 +4,6 @@ const uploadBtn = document.getElementById('uploadBtn');
const cancelBtn = document.getElementById('cancelBtn');
const progressText = document.getElementById("progress-text");
const statsText = document.getElementById("stats-text");
const speedText = document.getElementById("speed-text");
const etaText = document.getElementById("eta-text");
const progressBar = document.getElementById("progress-bar");
@@ -12,13 +11,17 @@ const progressContainer = document.getElementById("progress-container");
let currentXhr = null;
function formatBytes(bytes, decimals = 2) {
const CHUNK_THRESHOLD = 1024 * 1024 * 1024;
const CHUNK_SIZE = 10 * 1024 * 1024;
const MAX_PARALLEL_UPLOADS = 4;
const MAX_RETRIES = 3;
function formatBytes(bytes) {
if (bytes === 0) return '0 Bytes';
const k = 1024;
const dm = decimals < 0 ? 0 : decimals;
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + ' ' + sizes[i];
return (bytes / Math.pow(k, i)).toFixed(2) + ' ' + sizes[i];
}
function formatTime(seconds) {
@@ -30,18 +33,52 @@ function formatTime(seconds) {
h > 0 ? h : null,
(h > 0 ? m.toString().padStart(2, '0') : m),
s.toString().padStart(2, '0')
].filter(x => x !== null).join(':');
].filter(Boolean).join(':');
}
// Put the page into "upload in progress" mode: lock the upload button,
// reveal the cancel control and the progress/stats readouts.
function startUI() {
    uploadBtn.disabled = true;
    uploadBtn.innerText = "UPLOADING...";
    for (const el of [cancelBtn, progressContainer, progressText]) {
        el.classList.remove("hidden");
    }
    document.getElementById("stats-text").classList.remove("hidden");
}
// Render percentage, transfer speed and ETA for an in-flight upload.
// `loaded`/`total` are byte counts; `startTime` is Date.now() at start.
function updateProgress(loaded, total, startTime) {
    const pct = Math.round((loaded / total) * 100);
    progressBar.style.width = pct + "%";
    progressText.innerText = pct + "%";

    const elapsedSec = (Date.now() - startTime) / 1000;
    // Avoid division by zero on the very first progress event.
    if (elapsedSec <= 0) return;

    const bytesPerSec = loaded / elapsedSec;
    speedText.innerText = formatBytes(bytesPerSec) + "/S";
    etaText.innerText = formatTime((total - loaded) / bytesPerSec);
}
// Navigate to the uploaded file's public view page from a server
// response containing `view_key`.
function redirect(data) {
    if (!data.view_key) {
        alert("Invalid server response");
        return;
    }
    window.location.href = "/f/" + data.view_key;
}
zone.onclick = () => input.click();
zone.ondragover = (e) => {
zone.ondragover = e => {
e.preventDefault();
zone.classList.add('active');
};
zone.ondragleave = () => zone.classList.remove('active');
zone.ondrop = (e) => {
zone.ondrop = e => {
e.preventDefault();
zone.classList.remove('active');
if (e.dataTransfer.files.length) {
@@ -51,95 +88,70 @@ zone.ondrop = (e) => {
};
input.onchange = () => {
if (input.files.length) {
showFiles(input.files);
uploadBtn.disabled = false;
} else {
const files = Array.from(input.files || []);
if (!files.length) {
uploadBtn.disabled = true;
return;
}
};
function showFiles(fileList) {
const files = Array.from(fileList || []);
if (files.length === 0) return;
const total = files.reduce((acc, f) => acc + f.size, 0);
if (files.length === 1) {
const total = files.reduce((a, f) => a + f.size, 0);
document.getElementById('dz-text').innerText =
`${files[0].name} [${formatBytes(files[0].size)}]`;
} else {
document.getElementById('dz-text').innerText =
`${files.length} FILES [${formatBytes(total)}] — will be zipped`;
}
}
files.length === 1
? `${files[0].name} [${formatBytes(files[0].size)}]`
: `${files.length} FILES [${formatBytes(total)}]`;
uploadBtn.disabled = false;
};
uploadBtn.onclick = () => {
if (!input.files.length) return;
if (input.files.length === 1) {
handleUploadSingle(input.files[0]);
const files = input.files;
if (!files.length) return;
if (files.length === 1 && files[0].size > CHUNK_THRESHOLD) {
uploadChunked(files[0]);
} else if (files.length === 1) {
uploadSingle(files[0]);
} else {
handleUploadMulti(input.files);
uploadMulti(files);
}
};
cancelBtn.onclick = (e) => {
cancelBtn.onclick = e => {
e.stopPropagation();
if (currentXhr) {
currentXhr.abort();
alert("Upload cancelled.");
if (currentXhr) currentXhr.abort();
localStorage.clear();
location.reload();
}
};
function commonFormData() {
const fd = new FormData();
fd.append("once", document.getElementById("once").checked ? "true" : "false");
const hours = parseInt(document.getElementById("duration").value, 10);
fd.append("duration", hours);
fd.append("duration", parseInt(document.getElementById("duration").value, 10));
return fd;
}
function startUploadUI() {
uploadBtn.disabled = true;
uploadBtn.innerText = "UPLOADING...";
cancelBtn.classList.remove('hidden');
progressContainer.classList.remove("hidden");
progressText.classList.remove("hidden");
statsText.classList.remove("hidden");
}
function uploadSingle(file) {
startUI();
function setupXHRHandlers(xhr) {
let startTime = Date.now();
const fd = commonFormData();
fd.append("file", file);
xhr.upload.onprogress = (e) => {
if (e.lengthComputable) {
const percent = Math.round((e.loaded / e.total) * 100);
progressBar.style.width = percent + "%";
progressText.innerText = percent + "%";
const xhr = new XMLHttpRequest();
currentXhr = xhr;
const elapsedSeconds = (Date.now() - startTime) / 1000;
if (elapsedSeconds > 0) {
const bytesPerSecond = e.loaded / elapsedSeconds;
const remainingBytes = e.total - e.loaded;
const secondsRemaining = remainingBytes / bytesPerSecond;
speedText.innerText = formatBytes(bytesPerSecond) + "/S";
etaText.innerText = formatTime(secondsRemaining);
}
}
const startTime = Date.now();
xhr.upload.onprogress = e => {
if (e.lengthComputable) updateProgress(e.loaded, file.size, startTime);
};
xhr.onload = () => {
if (xhr.status >= 200 && xhr.status < 300) {
try {
const data = JSON.parse(xhr.responseText);
if (data.error) throw new Error(data.error);
window.location.href = "/f/" + data.view_key;
} catch (err) {
console.error("Invalid response:", xhr.responseText);
if (xhr.status < 200 || xhr.status >= 300) throw new Error();
redirect(JSON.parse(xhr.responseText));
} catch {
alert("Server error");
}
} else {
alert("Upload failed");
}
};
xhr.onerror = () => {
@@ -148,30 +160,170 @@ function setupXHRHandlers(xhr) {
location.reload();
}
};
}
function handleUploadSingle(file) {
startUploadUI();
const fd = commonFormData();
fd.append("file", file);
const xhr = new XMLHttpRequest();
currentXhr = xhr;
setupXHRHandlers(xhr);
xhr.open("POST", "/api/files/upload");
xhr.send(fd);
}
function handleUploadMulti(fileList) {
startUploadUI();
function uploadMulti(files) {
startUI();
const fd = commonFormData();
Array.from(fileList).forEach(f => fd.append("files", f));
const list = Array.from(files);
list.forEach(f => fd.append("files", f));
const total = list.reduce((a, f) => a + f.size, 0);
const xhr = new XMLHttpRequest();
currentXhr = xhr;
setupXHRHandlers(xhr);
const startTime = Date.now();
xhr.upload.onprogress = e => {
if (e.lengthComputable) updateProgress(e.loaded, total, startTime);
};
xhr.onload = () => {
try {
if (xhr.status < 200 || xhr.status >= 300) throw new Error();
redirect(JSON.parse(xhr.responseText));
} catch {
alert("Server error");
}
};
xhr.onerror = () => {
if (xhr.statusText !== "abort") {
alert("Upload failed");
location.reload();
}
};
xhr.open("POST", "/api/files/upload-multi");
xhr.send(fd);
}
// Upload one large file in CHUNK_SIZE pieces with bounded parallelism.
// Flow: POST /upload/init -> up to MAX_PARALLEL_UPLOADS concurrent
// POST /upload/chunk requests (each retried MAX_RETRIES times) ->
// POST /upload/complete, then redirect to the file page.
async function uploadChunked(file) {
    startUI();
    const totalChunks = Math.ceil(file.size / CHUNK_SIZE);

    const initRes = await fetch("/api/files/upload/init", {
        method: "POST",
        headers: {"Content-Type": "application/json"},
        body: JSON.stringify({
            filename: file.name,
            totalChunks,
            size: file.size
        })
    });
    if (!initRes.ok) {
        alert("Server error");
        return;
    }
    const { fileId } = await initRes.json();

    let uploadedBytes = 0;
    const startTime = Date.now();

    const chunks = Array.from({ length: totalChunks }, (_, i) => ({
        index: i,
        start: i * CHUNK_SIZE,
        end: Math.min((i + 1) * CHUNK_SIZE, file.size),
        retries: 0,
        uploading: false,
        done: false
    }));

    let active = 0;
    let completed = 0;
    let failed = false; // set once a chunk exhausts its retries; halts scheduling

    function uploadChunk(chunk) {
        return new Promise((res, rej) => {
            const blob = file.slice(chunk.start, chunk.end);
            const fd = new FormData();
            fd.append("chunk", blob);

            const xhr = new XMLHttpRequest();
            // NOTE(review): only the most recently started chunk is
            // abortable via the cancel button — confirm this is acceptable.
            currentXhr = xhr;

            let counted = 0; // bytes of this attempt already added to the total
            xhr.upload.onprogress = e => {
                if (!e.lengthComputable) return;
                uploadedBytes += e.loaded - counted;
                counted = e.loaded;
                updateProgress(uploadedBytes, file.size, startTime);
            };
            // Roll back this attempt's partial bytes so a retry does not
            // double-count progress.
            const rollback = () => { uploadedBytes -= counted; };
            xhr.onload = () => {
                if (xhr.status >= 200 && xhr.status < 300) {
                    chunk.done = true;
                    completed++;
                    res();
                } else {
                    rollback();
                    rej();
                }
            };
            xhr.onerror = () => {
                rollback();
                rej();
            };
            xhr.open("POST", "/api/files/upload/chunk");
            xhr.setRequestHeader("fileId", fileId);
            xhr.setRequestHeader("chunkIndex", chunk.index);
            xhr.send(fd);
        });
    }

    return new Promise((resolve, reject) => {
        function next() {
            if (failed) return; // a chunk gave up: stop launching new ones
            if (completed === totalChunks) return finish();
            while (active < MAX_PARALLEL_UPLOADS) {
                const chunk = chunks.find(c => !c.done && !c.uploading);
                if (!chunk) break;
                chunk.uploading = true;
                active++;
                uploadChunk(chunk)
                    .then(() => {
                        active--;
                        next();
                    })
                    .catch(() => {
                        active--;
                        if (chunk.retries++ < MAX_RETRIES) {
                            // Make the chunk schedulable again.
                            chunk.uploading = false;
                            next();
                        } else if (!failed) {
                            failed = true;
                            reject();
                        }
                    });
            }
        }

        async function finish() {
            try {
                const res = await fetch("/api/files/upload/complete", {
                    method: "POST",
                    headers: {"Content-Type": "application/json"},
                    body: JSON.stringify({
                        fileId,
                        filename: file.name,
                        totalChunks
                    })
                });
                if (!res.ok) throw new Error();
                redirect(await res.json());
                resolve();
            } catch {
                reject();
            }
        }

        next();
    });
}
function copy(id) {
const el = document.getElementById(id);
el.select();