1
0
Fork 0

Stop caching upload after successful upload

Just forbid sending 2 concurrent requests, but let the user decide if
they want to upload the same video multiple times
This commit is contained in:
Chocobozzz 2024-02-19 14:41:38 +01:00
parent ebe828ec75
commit 7b86b9b458
No known key found for this signature in database
GPG key ID: 583A612D890159BE
4 changed files with 9 additions and 85 deletions

View file

@@ -198,21 +198,6 @@ describe('Test resumable upload', function () {
await checkFileSize(uploadId, 0)
})
it('Should be able to accept 2 PUT requests', async function () {
const uploadId = await prepareUpload()
const result1 = await sendChunks({ pathUploadId: uploadId })
const result2 = await sendChunks({ pathUploadId: uploadId })
expect(result1.body.video.uuid).to.exist
expect(result1.body.video.uuid).to.equal(result2.body.video.uuid)
expect(result1.headers['x-resumable-upload-cached']).to.not.exist
expect(result2.headers['x-resumable-upload-cached']).to.equal('true')
await checkFileSize(uploadId, null)
})
it('Should not have the same upload id with 2 different users', async function () {
const originalName = 'toto.mp4'
const lastModified = new Date().getTime()
@@ -243,39 +228,6 @@ describe('Test resumable upload', function () {
await sendChunks({ pathUploadId: uploadId, token: userAccessToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 })
})
it('Should not cache a request after a delete', async function () {
const originalName = 'toto.mp4'
const lastModified = new Date().getTime()
const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
await sendChunks({ pathUploadId: uploadId1 })
await server.videos.endResumableUpload({ path, pathUploadId: uploadId1 })
const uploadId2 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
expect(uploadId1).to.equal(uploadId2)
const result2 = await sendChunks({ pathUploadId: uploadId1 })
expect(result2.headers['x-resumable-upload-cached']).to.not.exist
})
it('Should not cache after video deletion', async function () {
const originalName = 'toto.mp4'
const lastModified = new Date().getTime()
const uploadId1 = await prepareUpload({ originalName, lastModified })
const result1 = await sendChunks({ pathUploadId: uploadId1 })
await server.videos.remove({ id: result1.body.video.uuid })
const uploadId2 = await prepareUpload({ originalName, lastModified })
const result2 = await sendChunks({ pathUploadId: uploadId2 })
expect(result1.body.video.uuid).to.not.equal(result2.body.video.uuid)
expect(result2.headers['x-resumable-upload-cached']).to.not.exist
await checkFileSize(uploadId1, null)
await checkFileSize(uploadId2, null)
})
it('Should refuse an invalid digest', async function () {
const uploadId = await prepareUpload({ token: server.accessToken })

View file

@@ -123,7 +123,8 @@ async function addVideoResumable (req: express.Request, res: express.Response) {
const files = { previewfile: videoInfo.previewfile, thumbnailfile: videoInfo.thumbnailfile }
const response = await addVideo({ req, res, videoPhysicalFile, videoInfo, files })
await Redis.Instance.setUploadSession(req.query.upload_id, response)
await Redis.Instance.deleteUploadSession(req.query.upload_id)
await uploadx.storage.delete(res.locals.uploadVideoFileResumable)
return res.json(response)
}

View file

@@ -307,28 +307,14 @@ class Redis {
/* ************ Resumable uploads final responses ************ */
setUploadSession (uploadId: string, response?: { video: { id: number, shortUUID: string, uuid: string } }) {
return this.setValue(
'resumable-upload-' + uploadId,
response
? JSON.stringify(response)
: '',
RESUMABLE_UPLOAD_SESSION_LIFETIME
)
setUploadSession (uploadId: string) {
return this.setValue('resumable-upload-' + uploadId, '', RESUMABLE_UPLOAD_SESSION_LIFETIME)
}
doesUploadSessionExist (uploadId: string) {
return this.exists('resumable-upload-' + uploadId)
}
async getUploadSession (uploadId: string) {
const value = await this.getValue('resumable-upload-' + uploadId)
return value
? JSON.parse(value) as { video: { id: number, shortUUID: string, uuid: string } }
: undefined
}
deleteUploadSession (uploadId: string) {
return this.deleteKey('resumable-upload-' + uploadId)
}

View file

@@ -2,7 +2,6 @@ import express from 'express'
import { body, header, param, query, ValidationChain } from 'express-validator'
import { arrayify } from '@peertube/peertube-core-utils'
import { HttpStatusCode, ServerErrorCode, UserRight, VideoState } from '@peertube/peertube-models'
import { isTestInstance } from '@peertube/peertube-node-utils'
import { getResumableUploadPath } from '@server/helpers/upload.js'
import { Redis } from '@server/lib/redis.js'
import { uploadx } from '@server/lib/uploadx.js'
@@ -104,26 +103,12 @@ const videosAddResumableValidator = [
const sessionExists = await Redis.Instance.doesUploadSessionExist(uploadId)
if (sessionExists) {
const sessionResponse = await Redis.Instance.getUploadSession(uploadId)
res.setHeader('Retry-After', 300) // ask to retry after 5 min, knowing the upload_id is kept for up to 15 min after completion
if (!sessionResponse) {
res.setHeader('Retry-After', 300) // ask to retry after 5 min, knowing the upload_id is kept for up to 15 min after completion
return res.fail({
status: HttpStatusCode.SERVICE_UNAVAILABLE_503,
message: 'The upload is already being processed'
})
}
const videoStillExists = await VideoModel.load(sessionResponse.video.id)
if (videoStillExists) {
if (isTestInstance()) {
res.setHeader('x-resumable-upload-cached', 'true')
}
return res.json(sessionResponse)
}
return res.fail({
status: HttpStatusCode.SERVICE_UNAVAILABLE_503,
message: 'The upload is already being processed'
})
}
await Redis.Instance.setUploadSession(uploadId)