/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
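// Tests for PeerTube video redundancy (server/tests/api/redundancy/redundancy.ts):
// automatic strategies (most-views, trending, recently-added), HLS-only redundancy,
// the manual strategy, redundancy expiration and file replacement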
import 'mocha'
import * as chai from 'chai'
import { readdir } from 'fs-extra'
import * as magnetUtil from 'magnet-uri'
import { join } from 'path'
import { HttpStatusCode } from '@shared/core-utils'
import {
  checkSegmentHash,
  checkVideoFilesWereRemoved,
  cleanupTests,
  doubleFollow,
  flushAndRunMultipleServers,
  getVideo,
  getVideoWithToken,
  immutableAssign,
  killallServers,
  makeGetRequest,
  removeVideo,
  reRunServer,
  root,
  ServerInfo,
  setAccessTokensToServers,
  updateVideo,
  uploadVideo,
  viewVideo,
  wait,
  waitJobs,
  waitUntilLog
} from '@shared/extra-utils'
import { VideoDetails, VideoPrivacy, VideoRedundancyStrategy, VideoRedundancyStrategyWithManual } from '@shared/models'

const expect = chai.expect

let servers: ServerInfo[] = []
let video1Server2UUID: string
let video1Server2Id: number
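
// Check that the magnet URI advertises exactly the expected webseed URLs for this file resolution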
function checkMagnetWebseeds (file: { magnetUri: string, resolution: { id: number } }, baseWebseeds: string[], server: ServerInfo) {
  const parsed = magnetUtil.decode(file.magnetUri)

  for (const ws of baseWebseeds) {
    const found = parsed.urlList.find(url => url === `${ws}-${file.resolution.id}.mp4`)
    expect(found, `Webseed ${ws} not found in ${file.magnetUri} on server ${server.url}`).to.not.be.undefined
  }

  expect(parsed.urlList).to.have.lengthOf(baseWebseeds.length)
}
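
// Boot 3 servers with the given redundancy strategy (or none), upload a video on server 2,
// give it a view and make every server follow the others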
async function flushAndRunServers (strategy: VideoRedundancyStrategy | null, additionalParams: any = {}, withWebtorrent = true) {
  const strategies: any[] = []

  if (strategy !== null) {
    strategies.push(
      immutableAssign({
        min_lifetime: '1 hour',
        strategy: strategy,
        size: '400KB'
      }, additionalParams)
    )
  }

  const config = {
    transcoding: {
      webtorrent: {
        enabled: withWebtorrent
      },
      hls: {
        enabled: true
      }
    },
    redundancy: {
      videos: {
        check_interval: '5 seconds',
        strategies
      }
    }
  }

  servers = await flushAndRunMultipleServers(3, config)

  // Get the access tokens
  await setAccessTokensToServers(servers)

  {
    const res = await uploadVideo(servers[1].url, servers[1].accessToken, { name: 'video 1 server 2' })
    video1Server2UUID = res.body.video.uuid
    video1Server2Id = res.body.video.id

    await viewVideo(servers[1].url, video1Server2UUID)
  }

  await waitJobs(servers)

  // Server 1 and server 2 follow each other
  await doubleFollow(servers[0], servers[1])
  // Server 1 and server 3 follow each other
  await doubleFollow(servers[0], servers[2])
  // Server 2 and server 3 follow each other
  await doubleFollow(servers[1], servers[2])

  await waitJobs(servers)
}
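
// The origin webseed on server 2 is always advertised; the redundancy webseed on server 1
// only appears once the video has been duplicated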
async function check1WebSeed (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2UUID

  const webseeds = [
    `http://localhost:${servers[1].port}/static/webseed/${videoUUID}`
  ]

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const res = await getVideoWithToken(server.url, server.accessToken, videoUUID)

    const video: VideoDetails = res.body
    for (const f of video.files) {
      checkMagnetWebseeds(f, webseeds, server)
    }
  }
}

async function check2Webseeds (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2UUID

  const webseeds = [
    `http://localhost:${servers[0].port}/static/redundancy/${videoUUID}`,
    `http://localhost:${servers[1].port}/static/webseed/${videoUUID}`
  ]

  for (const server of servers) {
    const res = await getVideo(server.url, videoUUID)
    const video: VideoDetails = res.body

    for (const file of video.files) {
      checkMagnetWebseeds(file, webseeds, server)

      await makeGetRequest({
        url: servers[0].url,
        statusCodeExpected: HttpStatusCode.OK_200,
        path: '/static/redundancy/' + `${videoUUID}-${file.resolution.id}.mp4`,
        contentType: null
      })
      await makeGetRequest({
        url: servers[1].url,
        statusCodeExpected: HttpStatusCode.OK_200,
        path: `/static/webseed/${videoUUID}-${file.resolution.id}.mp4`,
        contentType: null
      })
    }
  }

  const directories = [
    'test' + servers[0].internalServerNumber + '/redundancy',
    'test' + servers[1].internalServerNumber + '/videos'
  ]

  for (const directory of directories) {
    const files = await readdir(join(root(), directory))
    expect(files).to.have.length.at.least(4)

    for (const resolution of [ 240, 360, 480, 720 ]) {
      expect(files.find(f => f === `${videoUUID}-${resolution}.mp4`)).to.not.be.undefined
    }
  }
}

async function check0PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2UUID

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const res = await getVideoWithToken(server.url, server.accessToken, videoUUID)
    const video: VideoDetails = res.body

    expect(video.streamingPlaylists).to.be.an('array')
    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(0)
  }
}

async function check1PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2UUID

  for (const server of servers) {
    const res = await getVideo(server.url, videoUUID)
    const video: VideoDetails = res.body

    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(1)

    const redundancy = video.streamingPlaylists[0].redundancies[0]
    expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID)
  }

  const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls'
  const baseUrlSegment = servers[0].url + '/static/redundancy/hls'

  const res = await getVideo(servers[0].url, videoUUID)
  const hlsPlaylist = (res.body as VideoDetails).streamingPlaylists[0]

  for (const resolution of [ 240, 360, 480, 720 ]) {
    await checkSegmentHash(baseUrlPlaylist, baseUrlSegment, videoUUID, resolution, hlsPlaylist)
  }

  const directories = [
    'test' + servers[0].internalServerNumber + '/redundancy/hls',
    'test' + servers[1].internalServerNumber + '/streaming-playlists/hls'
  ]

  for (const directory of directories) {
    const files = await readdir(join(root(), directory, videoUUID))
    expect(files).to.have.length.at.least(4)

    for (const resolution of [ 240, 360, 480, 720 ]) {
      const filename = `${videoUUID}-${resolution}-fragmented.mp4`
      expect(files.find(f => f === filename)).to.not.be.undefined
    }
  }
}
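
// Stats helpers: automatic strategies report the configured 400KB cap (409600 bytes) as totalSize,
// while the manual strategy has no size limit, so totalSize stays null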
async function checkStatsGlobal (strategy: VideoRedundancyStrategyWithManual) {
  let totalSize: number = null
  let statsLength = 1

  if (strategy !== 'manual') {
    totalSize = 409600
    statsLength = 2
  }

  const data = await servers[0].statsCommand.get()
  expect(data.videosRedundancy).to.have.lengthOf(statsLength)

  const stat = data.videosRedundancy[0]
  expect(stat.strategy).to.equal(strategy)
  expect(stat.totalSize).to.equal(totalSize)

  return stat
}

async function checkStatsWith1Redundancy (strategy: VideoRedundancyStrategyWithManual, onlyHls = false) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.be.at.least(1).and.below(409601)
  expect(stat.totalVideoFiles).to.equal(onlyHls ? 4 : 8)
  expect(stat.totalVideos).to.equal(1)
}

async function checkStatsWithoutRedundancy (strategy: VideoRedundancyStrategyWithManual) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.equal(0)
  expect(stat.totalVideoFiles).to.equal(0)
  expect(stat.totalVideos).to.equal(0)
}
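
// Redundancy is only allowed towards server 2 (the origin of the test video); server 3 must stay disallowed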
async function findServerFollows () {
  const body = await servers[0].followsCommand.getFollowings({ start: 0, count: 5, sort: '-createdAt' })
  const follows = body.data
  const server2 = follows.find(f => f.following.host === `localhost:${servers[1].port}`)
  const server3 = follows.find(f => f.following.host === `localhost:${servers[2].port}`)

  return { server2, server3 }
}

async function enableRedundancyOnServer1 () {
  await servers[0].redundancyCommand.updateRedundancy({ host: servers[1].host, redundancyAllowed: true })

  const { server2, server3 } = await findServerFollows()

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.true
}

async function disableRedundancyOnServer1 () {
  await servers[0].redundancyCommand.updateRedundancy({ host: servers[1].host, redundancyAllowed: false })

  const { server2, server3 } = await findServerFollows()

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.false
}
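
// Each sub-suite below boots a fresh set of 3 servers in its before() hook (see flushAndRunServers)
// and tears them down in after()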
describe('Test videos redundancy', function () {

  describe('With most-views strategy', function () {
    const strategy = 'most-views'

    before(function () {
      this.timeout(120000)

      return flushAndRunServers(strategy)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await waitUntilLog(servers[0], 'Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should undo redundancy on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      await disableRedundancyOnServer1()

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0].internalServerNumber, [ 'videos', join('playlists', 'hls') ])
    })

    after(async function () {
      return cleanupTests(servers)
    })
  })

  describe('With trending strategy', function () {
    const strategy = 'trending'

    before(function () {
      this.timeout(120000)

      return flushAndRunServers(strategy)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await waitUntilLog(servers[0], 'Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should unfollow on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      await servers[0].followsCommand.unfollow({ target: servers[1] })

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0].internalServerNumber, [ 'videos' ])
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('With recently added strategy', function () {
    const strategy = 'recently-added'

    before(function () {
      this.timeout(120000)

      return flushAndRunServers(strategy, { min_views: 3 })
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 1 webseed on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await wait(15000)
      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should view 2 times the first video to have > min_views config', async function () {
      this.timeout(80000)

      await viewVideo(servers[0].url, video1Server2UUID)
      await viewVideo(servers[2].url, video1Server2UUID)

      await wait(10000)
      await waitJobs(servers)
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await waitUntilLog(servers[0], 'Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should remove the video and the redundancy files', async function () {
      this.timeout(20000)

      await removeVideo(servers[1].url, servers[1].accessToken, video1Server2UUID)

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved(video1Server2UUID, server.internalServerNumber)
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('With only HLS files', function () {
    const strategy = 'recently-added'

    before(async function () {
      this.timeout(120000)

      await flushAndRunServers(strategy, { min_views: 3 }, false)
    })

    it('Should have 0 playlist redundancy on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 0 redundancy on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await wait(15000)
      await waitJobs(servers)

      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should have 1 redundancy on the first video', async function () {
      this.timeout(160000)

      await viewVideo(servers[0].url, video1Server2UUID)
      await viewVideo(servers[2].url, video1Server2UUID)

      await wait(10000)
      await waitJobs(servers)

      await waitJobs(servers)
      await waitUntilLog(servers[0], 'Duplicated ', 1)
      await waitJobs(servers)

      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy, true)
    })

    it('Should remove the video and the redundancy files', async function () {
      this.timeout(20000)

      await removeVideo(servers[1].url, servers[1].accessToken, video1Server2UUID)

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved(video1Server2UUID, server.internalServerNumber)
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('With manual strategy', function () {
    before(function () {
      this.timeout(120000)

      return flushAndRunServers(null)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy('manual')
    })

    it('Should create a redundancy on first video', async function () {
      await servers[0].redundancyCommand.addVideo({ videoId: video1Server2Id })
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await waitUntilLog(servers[0], 'Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy('manual')
    })

    it('Should manually remove redundancies on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      const body = await servers[0].redundancyCommand.listVideos({ target: 'remote-videos' })

      const videos = body.data
      expect(videos).to.have.lengthOf(1)

      const video = videos[0]

      for (const r of video.redundancies.files.concat(video.redundancies.streamingPlaylists)) {
        await servers[0].redundancyCommand.removeVideo({ redundancyId: r.id })
      }

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0].serverNumber, [ 'videos' ])
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
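
  // min_lifetime is lowered to 7 seconds here, so the redundancy should expire shortly after server 1 goes down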
  describe('Test expiration', function () {
    const strategy = 'recently-added'

    async function checkContains (servers: ServerInfo[], str: string) {
      for (const server of servers) {
        const res = await getVideo(server.url, video1Server2UUID)
        const video: VideoDetails = res.body

        for (const f of video.files) {
          expect(f.magnetUri).to.contain(str)
        }
      }
    }

    async function checkNotContains (servers: ServerInfo[], str: string) {
      for (const server of servers) {
        const res = await getVideo(server.url, video1Server2UUID)
        const video: VideoDetails = res.body

        for (const f of video.files) {
          expect(f.magnetUri).to.not.contain(str)
        }
      }
    }

    before(async function () {
      this.timeout(120000)

      await flushAndRunServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()
    })

    it('Should still have 2 webseeds after 10 seconds', async function () {
      this.timeout(80000)

      await wait(10000)

      try {
        await checkContains(servers, 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
      } catch {
        // Maybe a server deleted a redundancy in the scheduler
        await wait(2000)

        await checkContains(servers, 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
      }
    })

    it('Should stop server 1 and expire video redundancy', async function () {
      this.timeout(80000)

      killallServers([ servers[0] ])

      await wait(15000)

      await checkNotContains([ servers[1], servers[2] ], 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
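
  // A second video is uploaded on server 2; the tests check that server 1's redundancy moves from video 1 to video 2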
  describe('Test file replacement', function () {
    let video2Server2UUID: string
    const strategy = 'recently-added'

    before(async function () {
      this.timeout(120000)

      await flushAndRunServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()
      await waitJobs(servers)
      await waitUntilLog(servers[0], 'Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds(video1Server2UUID)
      await check1PlaylistRedundancies(video1Server2UUID)
      await checkStatsWith1Redundancy(strategy)

      const res = await uploadVideo(servers[1].url, servers[1].accessToken, { name: 'video 2 server 2', privacy: VideoPrivacy.PRIVATE })
      video2Server2UUID = res.body.video.uuid

      // Wait transcoding before federation
      await waitJobs(servers)

      await updateVideo(servers[1].url, servers[1].accessToken, video2Server2UUID, { privacy: VideoPrivacy.PUBLIC })
    })

    it('Should cache video 2 webseeds on the first video', async function () {
      this.timeout(120000)

      await waitJobs(servers)

      let checked = false

      while (checked === false) {
        await wait(1000)

        try {
          await check1WebSeed(video1Server2UUID)
          await check0PlaylistRedundancies(video1Server2UUID)

          await check2Webseeds(video2Server2UUID)
          await check1PlaylistRedundancies(video2Server2UUID)

          checked = true
        } catch {
          checked = false
        }
      }
    })

    it('Should disable strategy and remove redundancies', async function () {
      this.timeout(80000)

      await waitJobs(servers)

      killallServers([ servers[0] ])
      await reRunServer(servers[0], {
        redundancy: {
          videos: {
            check_interval: '1 second',
            strategies: []
          }
        }
      })

      await waitJobs(servers)

      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0].internalServerNumber, [ join('redundancy', 'hls') ])
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
})