// Source: peertube/server/tests/feeds/feeds.ts (test suite for syndication feeds)
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import * as chai from 'chai'
import 'mocha'
import {
  cleanupTests,
  createUser,
  doubleFollow,
  flushAndRunMultipleServers,
  getJSONfeed,
  getMyUserInformation,
  getXMLfeed,
  ServerInfo,
  setAccessTokensToServers,
  uploadVideo,
  userLogin
} from '../../../shared/extra-utils'
import * as libxmljs from 'libxmljs'
import { addVideoCommentThread } from '../../../shared/extra-utils/videos/video-comments'
import { waitJobs } from '../../../shared/extra-utils/server/jobs'
import { User } from '../../../shared/models/users'
import { VideoPrivacy } from '@shared/models'
// Register chai plugins: '.xml' validity assertions and '.jsonSchema' matching.
chai.use(require('chai-xml'))
chai.use(require('chai-json-schema'))
// Include full stack traces when an assertion fails, to ease CI debugging.
chai.config.includeStack = true
const expect = chai.expect
describe('Test syndication feeds', () => {
let servers: ServerInfo[] = []
2018-09-13 03:48:34 -04:00
let userAccessToken: string
2019-05-31 08:02:26 -04:00
let rootAccountId: number
let rootChannelId: number
let userAccountId: number
let userChannelId: number
2018-06-08 14:34:37 -04:00
before(async function () {
this.timeout(120000)
// Run servers
servers = await flushAndRunMultipleServers(2)
await setAccessTokensToServers(servers)
await doubleFollow(servers[0], servers[1])
2018-09-13 03:48:34 -04:00
{
const res = await getMyUserInformation(servers[0].url, servers[0].accessToken)
const user: User = res.body
2019-05-31 08:02:26 -04:00
rootAccountId = user.account.id
rootChannelId = user.videoChannels[0].id
2018-06-08 14:34:37 -04:00
}
2018-09-13 03:48:34 -04:00
{
const attr = { username: 'john', password: 'password' }
2020-01-31 10:56:52 -05:00
await createUser({ url: servers[0].url, accessToken: servers[0].accessToken, username: attr.username, password: attr.password })
2018-09-13 03:48:34 -04:00
userAccessToken = await userLogin(servers[0], attr)
const res = await getMyUserInformation(servers[0].url, userAccessToken)
const user: User = res.body
2019-05-31 08:02:26 -04:00
userAccountId = user.account.id
userChannelId = user.videoChannels[0].id
2018-09-13 03:48:34 -04:00
}
{
2020-01-31 10:56:52 -05:00
await uploadVideo(servers[0].url, userAccessToken, { name: 'user video' })
2018-09-13 03:48:34 -04:00
}
{
const videoAttributes = {
name: 'my super name for server 1',
description: 'my super description for server 1',
fixture: 'video_short.webm'
}
2020-01-31 10:56:52 -05:00
const res = await uploadVideo(servers[0].url, servers[0].accessToken, videoAttributes)
2018-09-13 03:48:34 -04:00
const videoId = res.body.video.id
2020-01-31 10:56:52 -05:00
await addVideoCommentThread(servers[0].url, servers[0].accessToken, videoId, 'super comment 1')
await addVideoCommentThread(servers[0].url, servers[0].accessToken, videoId, 'super comment 2')
2018-09-13 03:48:34 -04:00
}
2018-06-08 14:34:37 -04:00
{
const videoAttributes = { name: 'unlisted video', privacy: VideoPrivacy.UNLISTED }
const res = await uploadVideo(servers[0].url, servers[0].accessToken, videoAttributes)
const videoId = res.body.video.id
await addVideoCommentThread(servers[0].url, servers[0].accessToken, videoId, 'comment on unlisted video')
}
await waitJobs(servers)
2018-06-08 14:34:37 -04:00
})
describe('All feed', function () {
it('Should be well formed XML (covers RSS 2.0 and ATOM 1.0 endpoints)', async function () {
for (const feed of [ 'video-comments' as 'video-comments', 'videos' as 'videos' ]) {
2020-01-31 10:56:52 -05:00
const rss = await getXMLfeed(servers[0].url, feed)
2018-06-08 14:34:37 -04:00
expect(rss.text).xml.to.be.valid()
2020-01-31 10:56:52 -05:00
const atom = await getXMLfeed(servers[0].url, feed, 'atom')
2018-06-08 14:34:37 -04:00
expect(atom.text).xml.to.be.valid()
}
})
it('Should be well formed JSON (covers JSON feed 1.0 endpoint)', async function () {
for (const feed of [ 'video-comments' as 'video-comments', 'videos' as 'videos' ]) {
2020-01-31 10:56:52 -05:00
const json = await getJSONfeed(servers[0].url, feed)
expect(JSON.parse(json.text)).to.be.jsonSchema({ type: 'object' })
2018-06-08 14:34:37 -04:00
}
})
})
describe('Videos feed', function () {
it('Should contain a valid enclosure (covers RSS 2.0 endpoint)', async function () {
for (const server of servers) {
const rss = await getXMLfeed(server.url, 'videos')
const xmlDoc = libxmljs.parseXmlString(rss.text)
const xmlEnclosure = xmlDoc.get('/rss/channel/item/enclosure')
expect(xmlEnclosure).to.exist
expect(xmlEnclosure.attr('type').value()).to.be.equal('application/x-bittorrent')
expect(xmlEnclosure.attr('length').value()).to.be.equal('218910')
expect(xmlEnclosure.attr('url').value()).to.contain('720.torrent')
}
})
it('Should contain a valid \'attachments\' object (covers JSON feed 1.0 endpoint)', async function () {
for (const server of servers) {
const json = await getJSONfeed(server.url, 'videos')
const jsonObj = JSON.parse(json.text)
2018-09-13 03:48:34 -04:00
expect(jsonObj.items.length).to.be.equal(2)
2020-01-31 10:56:52 -05:00
expect(jsonObj.items[0].attachments).to.exist
expect(jsonObj.items[0].attachments.length).to.be.eq(1)
expect(jsonObj.items[0].attachments[0].mime_type).to.be.eq('application/x-bittorrent')
expect(jsonObj.items[0].attachments[0].size_in_bytes).to.be.eq(218910)
expect(jsonObj.items[0].attachments[0].url).to.contain('720.torrent')
2018-06-08 14:34:37 -04:00
}
})
2018-09-13 03:48:34 -04:00
it('Should filter by account', async function () {
2019-05-31 08:02:26 -04:00
{
const json = await getJSONfeed(servers[0].url, 'videos', { accountId: rootAccountId })
const jsonObj = JSON.parse(json.text)
expect(jsonObj.items.length).to.be.equal(1)
2020-01-31 10:56:52 -05:00
expect(jsonObj.items[0].title).to.equal('my super name for server 1')
expect(jsonObj.items[0].author.name).to.equal('root')
2019-05-31 08:02:26 -04:00
}
{
const json = await getJSONfeed(servers[0].url, 'videos', { accountId: userAccountId })
const jsonObj = JSON.parse(json.text)
expect(jsonObj.items.length).to.be.equal(1)
2020-01-31 10:56:52 -05:00
expect(jsonObj.items[0].title).to.equal('user video')
expect(jsonObj.items[0].author.name).to.equal('john')
2019-05-31 08:02:26 -04:00
}
2018-09-13 03:48:34 -04:00
for (const server of servers) {
{
2019-05-31 08:02:26 -04:00
const json = await getJSONfeed(server.url, 'videos', { accountName: 'root@localhost:' + servers[0].port })
2018-09-13 03:48:34 -04:00
const jsonObj = JSON.parse(json.text)
expect(jsonObj.items.length).to.be.equal(1)
2020-01-31 10:56:52 -05:00
expect(jsonObj.items[0].title).to.equal('my super name for server 1')
2018-09-13 03:48:34 -04:00
}
{
2019-05-31 08:02:26 -04:00
const json = await getJSONfeed(server.url, 'videos', { accountName: 'john@localhost:' + servers[0].port })
2018-09-13 03:48:34 -04:00
const jsonObj = JSON.parse(json.text)
expect(jsonObj.items.length).to.be.equal(1)
2020-01-31 10:56:52 -05:00
expect(jsonObj.items[0].title).to.equal('user video')
2018-09-13 03:48:34 -04:00
}
}
2019-05-31 08:02:26 -04:00
})
2018-09-13 03:48:34 -04:00
2019-05-31 08:02:26 -04:00
it('Should filter by video channel', async function () {
2018-09-13 03:48:34 -04:00
{
2019-05-31 08:02:26 -04:00
const json = await getJSONfeed(servers[0].url, 'videos', { videoChannelId: rootChannelId })
2018-09-13 03:48:34 -04:00
const jsonObj = JSON.parse(json.text)
expect(jsonObj.items.length).to.be.equal(1)
2020-01-31 10:56:52 -05:00
expect(jsonObj.items[0].title).to.equal('my super name for server 1')
expect(jsonObj.items[0].author.name).to.equal('root')
2018-09-13 03:48:34 -04:00
}
{
2019-05-31 08:02:26 -04:00
const json = await getJSONfeed(servers[0].url, 'videos', { videoChannelId: userChannelId })
2018-09-13 03:48:34 -04:00
const jsonObj = JSON.parse(json.text)
expect(jsonObj.items.length).to.be.equal(1)
2020-01-31 10:56:52 -05:00
expect(jsonObj.items[0].title).to.equal('user video')
expect(jsonObj.items[0].author.name).to.equal('john')
2018-09-13 03:48:34 -04:00
}
for (const server of servers) {
{
2019-05-31 08:02:26 -04:00
const json = await getJSONfeed(server.url, 'videos', { videoChannelName: 'root_channel@localhost:' + servers[0].port })
2018-09-13 03:48:34 -04:00
const jsonObj = JSON.parse(json.text)
expect(jsonObj.items.length).to.be.equal(1)
2020-01-31 10:56:52 -05:00
expect(jsonObj.items[0].title).to.equal('my super name for server 1')
2018-09-13 03:48:34 -04:00
}
{
2019-05-31 08:02:26 -04:00
const json = await getJSONfeed(server.url, 'videos', { videoChannelName: 'john_channel@localhost:' + servers[0].port })
2018-09-13 03:48:34 -04:00
const jsonObj = JSON.parse(json.text)
expect(jsonObj.items.length).to.be.equal(1)
2020-01-31 10:56:52 -05:00
expect(jsonObj.items[0].title).to.equal('user video')
2018-09-13 03:48:34 -04:00
}
}
})
2018-06-08 14:34:37 -04:00
})
describe('Video comments feed', function () {
it('Should contain valid comments (covers JSON feed 1.0 endpoint) and not from unlisted videos', async function () {
2018-06-08 14:34:37 -04:00
for (const server of servers) {
const json = await getJSONfeed(server.url, 'video-comments')
const jsonObj = JSON.parse(json.text)
expect(jsonObj.items.length).to.be.equal(2)
2020-01-31 10:56:52 -05:00
expect(jsonObj.items[0].html_content).to.equal('super comment 2')
expect(jsonObj.items[1].html_content).to.equal('super comment 1')
2018-06-08 14:34:37 -04:00
}
})
})
2019-04-24 09:10:37 -04:00
after(async function () {
await cleanupTests(servers)
2018-06-08 14:34:37 -04:00
})
})