Add logs endpoint

parent 31b6ddf866
commit fd8710b897

24 changed files with 437 additions and 79 deletions
@@ -1,6 +0,0 @@ (deleted file)
-import { Component } from '@angular/core'
-
-@Component({
-  template: '<router-outlet></router-outlet>'
-})
-export class JobsComponent {}
@@ -1,32 +0,0 @@ (deleted file)
-import { Routes } from '@angular/router'
-import { UserRight } from '../../../../../shared'
-import { UserRightGuard } from '../../core'
-import { JobsComponent } from './job.component'
-import { JobsListComponent } from './jobs-list/jobs-list.component'
-
-export const JobsRoutes: Routes = [
-  {
-    path: 'jobs',
-    component: JobsComponent,
-    canActivate: [ UserRightGuard ],
-    data: {
-      userRight: UserRight.MANAGE_JOBS
-    },
-    children: [
-      {
-        path: '',
-        redirectTo: 'list',
-        pathMatch: 'full'
-      },
-      {
-        path: 'list',
-        component: JobsListComponent,
-        data: {
-          meta: {
-            title: 'Jobs list'
-          }
-        }
-      }
-    ]
-  }
-]
@@ -1 +0,0 @@ (deleted file)
-export * from './jobs-list.component'
@@ -1 +0,0 @@ (deleted file)
-export * from './job.service'
@@ -1,10 +1,11 @@
 import * as program from 'commander'
-import { createReadStream, readdirSync, statSync } from 'fs-extra'
+import { createReadStream, readdir } from 'fs-extra'
 import { join } from 'path'
 import { createInterface } from 'readline'
 import * as winston from 'winston'
 import { labelFormatter } from '../server/helpers/logger'
 import { CONFIG } from '../server/initializers/constants'
+import { mtimeSortFilesDesc } from '../shared/utils/logs/logs'

 program
   .option('-l, --level [level]', 'Level log (debug/info/warn/error)')

@@ -52,23 +53,42 @@ const logLevels = {
   debug: logger.debug.bind(logger)
 }

-const logFiles = readdirSync(CONFIG.STORAGE.LOG_DIR)
-const lastLogFile = getNewestFile(logFiles, CONFIG.STORAGE.LOG_DIR)
-
-const path = join(CONFIG.STORAGE.LOG_DIR, lastLogFile)
-console.log('Opening %s.', path)
-
-const rl = createInterface({
-  input: createReadStream(path)
-})
-
-rl.on('line', line => {
-  const log = JSON.parse(line)
-  // Don't know why but loggerFormat does not remove splat key
-  Object.assign(log, { splat: undefined })
-
-  logLevels[log.level](log)
-})
-
+run()
+  .then(() => process.exit(0))
+  .catch(err => console.error(err))
+
+function run () {
+  return new Promise(async res => {
+    const logFiles = await readdir(CONFIG.STORAGE.LOG_DIR)
+    const lastLogFile = await getNewestFile(logFiles, CONFIG.STORAGE.LOG_DIR)
+
+    const path = join(CONFIG.STORAGE.LOG_DIR, lastLogFile)
+    console.log('Opening %s.', path)
+
+    const stream = createReadStream(path)
+
+    const rl = createInterface({
+      input: stream
+    })
+
+    rl.on('line', line => {
+      const log = JSON.parse(line)
+      // Don't know why but loggerFormat does not remove splat key
+      Object.assign(log, { splat: undefined })
+
+      logLevels[ log.level ](log)
+    })
+
+    stream.once('close', () => res())
+  })
+}
+
+// Thanks: https://stackoverflow.com/a/37014317
+async function getNewestFile (files: string[], basePath: string) {
+  const sorted = await mtimeSortFilesDesc(files, basePath)
+
+  return (sorted.length > 0) ? sorted[ 0 ].file : ''
+}
+
 function toTimeFormat (time: string) {
   const timestamp = Date.parse(time)

@@ -77,17 +97,3 @@ function toTimeFormat (time: string) {

   return new Date(timestamp).toISOString()
 }
-
-// Thanks: https://stackoverflow.com/a/37014317
-function getNewestFile (files: string[], basePath: string) {
-  const out = []
-
-  files.forEach(file => {
-    const stats = statSync(basePath + '/' + file)
-    if (stats.isFile()) out.push({ file, mtime: stats.mtime.getTime() })
-  })
-
-  out.sort((a, b) => b.mtime - a.mtime)
-
-  return (out.length > 0) ? out[ 0 ].file : ''
-}
@@ -4,6 +4,7 @@ import { statsRouter } from './stats'
 import { serverRedundancyRouter } from './redundancy'
 import { serverBlocklistRouter } from './server-blocklist'
 import { contactRouter } from './contact'
+import { logsRouter } from './logs'

 const serverRouter = express.Router()

@@ -12,6 +13,7 @@ serverRouter.use('/', serverRedundancyRouter)
 serverRouter.use('/', statsRouter)
 serverRouter.use('/', serverBlocklistRouter)
 serverRouter.use('/', contactRouter)
+serverRouter.use('/', logsRouter)

 // ---------------------------------------------------------------------------
server/controllers/api/server/logs.ts (new file, 90 lines)
@@ -0,0 +1,90 @@
+import * as express from 'express'
+import { UserRight } from '../../../../shared/models/users'
+import { asyncMiddleware, authenticate, ensureUserHasRight } from '../../../middlewares'
+import { mtimeSortFilesDesc } from '../../../../shared/utils/logs/logs'
+import { readdir } from 'fs-extra'
+import { CONFIG, MAX_LOGS_OUTPUT_CHARACTERS } from '../../../initializers'
+import { createInterface } from 'readline'
+import { createReadStream } from 'fs'
+import { join } from 'path'
+import { getLogsValidator } from '../../../middlewares/validators/logs'
+import { LogLevel } from '../../../../shared/models/server/log-level.type'
+
+const logsRouter = express.Router()
+
+logsRouter.get('/logs',
+  authenticate,
+  ensureUserHasRight(UserRight.MANAGE_LOGS),
+  getLogsValidator,
+  asyncMiddleware(getLogs)
+)
+
+// ---------------------------------------------------------------------------
+
+export {
+  logsRouter
+}
+
+// ---------------------------------------------------------------------------
+
+async function getLogs (req: express.Request, res: express.Response) {
+  const logFiles = await readdir(CONFIG.STORAGE.LOG_DIR)
+  const sortedLogFiles = await mtimeSortFilesDesc(logFiles, CONFIG.STORAGE.LOG_DIR)
+  let currentSize = 0
+
+  const startDate = new Date(req.query.startDate)
+  const endDate = req.query.endDate ? new Date(req.query.endDate) : new Date()
+  const level: LogLevel = req.query.level || 'info'
+
+  let output = ''
+
+  for (const meta of sortedLogFiles) {
+    const path = join(CONFIG.STORAGE.LOG_DIR, meta.file)
+
+    const result = await getOutputFromFile(path, startDate, endDate, level, currentSize)
+    if (!result.output) break
+
+    output = output + result.output
+    currentSize = result.currentSize
+
+    if (currentSize > MAX_LOGS_OUTPUT_CHARACTERS) break
+  }
+
+  return res.json(output).end()
+}
+
+function getOutputFromFile (path: string, startDate: Date, endDate: Date, level: LogLevel, currentSize: number) {
+  const startTime = startDate.getTime()
+  const endTime = endDate.getTime()
+
+  const logsLevel: { [ id in LogLevel ]: number } = {
+    debug: 0,
+    info: 1,
+    warn: 2,
+    error: 3
+  }
+
+  return new Promise<{ output: string, currentSize: number }>(res => {
+    const stream = createReadStream(path)
+    let output = ''
+
+    stream.once('close', () => res({ output, currentSize }))
+
+    const rl = createInterface({
+      input: stream
+    })
+
+    rl.on('line', line => {
+      const log = JSON.parse(line)
+
+      const logTime = new Date(log.timestamp).getTime()
+      if (logTime >= startTime && logTime <= endTime && logsLevel[log.level] >= logsLevel[level]) {
+        output += line
+
+        currentSize += line.length
+
+        if (currentSize > MAX_LOGS_OUTPUT_CHARACTERS) stream.close()
+      }
+    })
+  })
+}
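For reference, the route registered above answers GET /api/v1/server/logs for authenticated admins and requires at least a startDate query parameter. A minimal client sketch, not part of this commit and assuming a global fetch plus an OAuth access token obtained elsewhere, could look like this:

    // Fetch the last hour of warnings and errors from the new endpoint (illustrative only).
    async function fetchRecentWarnings (baseUrl: string, accessToken: string) {
      const startDate = new Date(Date.now() - 3600 * 1000).toISOString()
      const url = baseUrl + '/api/v1/server/logs?startDate=' + encodeURIComponent(startDate) + '&level=warn'

      const res = await fetch(url, { headers: { Authorization: 'Bearer ' + accessToken } })
      if (res.status !== 200) throw new Error('Unexpected status ' + res.status)

      return res.json() // the filtered log output assembled by getLogs() above
    }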
server/helpers/custom-validators/logs.ts (new file, 14 lines)
@@ -0,0 +1,14 @@
+import { exists } from './misc'
+import { LogLevel } from '../../../shared/models/server/log-level.type'
+
+const logLevels: LogLevel[] = [ 'debug', 'info', 'warn', 'error' ]
+
+function isValidLogLevel (value: any) {
+  return exists(value) && logLevels.indexOf(value) !== -1
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+  isValidLogLevel
+}
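A quick illustration of how the validator above behaves (sample inputs invented, not taken from the commit):

    isValidLogLevel('warn')      // true  – one of debug/info/warn/error
    isValidLogLevel('verbose')   // false – not a known level
    isValidLogLevel(undefined)   // false – exists() rejects missing values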
@@ -3,10 +3,12 @@ import { mkdirpSync } from 'fs-extra'
 import * as path from 'path'
 import * as winston from 'winston'
 import { CONFIG } from '../initializers'
+import { omit } from 'lodash'

 const label = CONFIG.WEBSERVER.HOSTNAME + ':' + CONFIG.WEBSERVER.PORT

 // Create the directory if it does not exist
+// FIXME: use async
 mkdirpSync(CONFIG.STORAGE.LOG_DIR)

 function loggerReplacer (key: string, value: any) {

@@ -22,13 +24,10 @@ function loggerReplacer (key: string, value: any) {
 }

 const consoleLoggerFormat = winston.format.printf(info => {
-  const obj = {
-    meta: info.meta,
-    err: info.err,
-    sql: info.sql
-  }
+  const obj = omit(info, 'label', 'timestamp', 'level', 'message')

   let additionalInfos = JSON.stringify(obj, loggerReplacer, 2)
+
   if (additionalInfos === undefined || additionalInfos === '{}') additionalInfos = ''
   else additionalInfos = ' ' + additionalInfos

@@ -57,7 +56,7 @@ const logger = winston.createLogger({
       filename: path.join(CONFIG.STORAGE.LOG_DIR, 'peertube.log'),
       handleExceptions: true,
       maxsize: 1024 * 1024 * 12,
-      maxFiles: 5,
+      maxFiles: 20,
       format: winston.format.combine(
         winston.format.timestamp(),
         jsonLoggerFormat
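With the change above, the console format builds its extra output with lodash's omit instead of hand-picking meta/err/sql, so any additional metadata attached to a log call gets serialized. A small illustration, with an invented info object:

    import { omit } from 'lodash'

    const info = { label: 'example.com:443', timestamp: '2019-02-11T10:00:00.000Z', level: 'info', message: 'hello', tags: [ 'api' ] }
    omit(info, 'label', 'timestamp', 'level', 'message') // => { tags: [ 'api' ] }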
@@ -730,6 +730,8 @@ const FEEDS = {
   COUNT: 20
 }

+const MAX_LOGS_OUTPUT_CHARACTERS = 10 * 1000 * 1000
+
 // ---------------------------------------------------------------------------

 const TRACKER_RATE_LIMITS = {

@@ -819,6 +821,7 @@ export {
   STATIC_PATHS,
   VIDEO_IMPORT_TIMEOUT,
   VIDEO_PLAYLIST_TYPES,
+  MAX_LOGS_OUTPUT_CHARACTERS,
   ACTIVITY_PUB,
   ACTIVITY_PUB_ACTOR_TYPES,
   THUMBNAILS_SIZE,
server/middlewares/validators/logs.ts (new file, 31 lines)
@@ -0,0 +1,31 @@
+import * as express from 'express'
+import { logger } from '../../helpers/logger'
+import { areValidationErrors } from './utils'
+import { isDateValid } from '../../helpers/custom-validators/misc'
+import { query } from 'express-validator/check'
+import { isValidLogLevel } from '../../helpers/custom-validators/logs'
+
+const getLogsValidator = [
+  query('startDate')
+    .custom(isDateValid).withMessage('Should have a valid start date'),
+  query('level')
+    .optional()
+    .custom(isValidLogLevel).withMessage('Should have a valid level'),
+  query('endDate')
+    .optional()
+    .custom(isDateValid).withMessage('Should have a valid end date'),
+
+  (req: express.Request, res: express.Response, next: express.NextFunction) => {
+    logger.debug('Checking getLogsValidator parameters.', { parameters: req.query })
+
+    if (areValidationErrors(req, res)) return
+
+    return next()
+  }
+]
+
+// ---------------------------------------------------------------------------
+
+export {
+  getLogsValidator
+}
@@ -14,18 +14,18 @@
} from '../../../helpers/custom-validators/misc'
import {
  checkUserCanManageVideo,
  isVideoOriginallyPublishedAtValid,
  doesVideoChannelOfAccountExist,
  doesVideoExist,
  isScheduleVideoUpdatePrivacyValid,
  isVideoCategoryValid,
  doesVideoChannelOfAccountExist,
  isVideoDescriptionValid,
  doesVideoExist,
  isVideoFile,
  isVideoFilterValid,
  isVideoImage,
  isVideoLanguageValid,
  isVideoLicenceValid,
  isVideoNameValid,
  isVideoOriginallyPublishedAtValid,
  isVideoPrivacyValid,
  isVideoSupportValid,
  isVideoTagsValid

@@ -37,10 +37,8 @@ import { authenticatePromiseIfNeeded } from '../../oauth'
import { areValidationErrors } from '../utils'
import { cleanUpReqFiles } from '../../../helpers/express-utils'
import { VideoModel } from '../../../models/video/video'
import { UserModel } from '../../../models/account/user'
import { checkUserCanTerminateOwnershipChange, doesChangeVideoOwnershipExist } from '../../../helpers/custom-validators/video-ownership'
import { VideoChangeOwnershipAccept } from '../../../../shared/models/videos/video-change-ownership-accept.model'
import { VideoChangeOwnershipModel } from '../../../models/video/video-change-ownership'
import { AccountModel } from '../../../models/account/account'
import { VideoFetchType } from '../../../helpers/video'
import { isNSFWQueryValid, isNumberArray, isStringArray } from '../../../helpers/custom-validators/search'
@@ -4,6 +4,7 @@ import './config'
 import './contact-form'
 import './follows'
 import './jobs'
+import './logs'
 import './redundancy'
 import './search'
 import './services'
server/tests/api/check-params/logs.ts (new file, 117 lines)
@@ -0,0 +1,117 @@
+/* tslint:disable:no-unused-expression */
+
+import 'mocha'
+
+import {
+  createUser,
+  flushTests,
+  killallServers,
+  runServer,
+  ServerInfo,
+  setAccessTokensToServers,
+  userLogin
+} from '../../../../shared/utils'
+import { makeGetRequest } from '../../../../shared/utils/requests/requests'
+
+describe('Test logs API validators', function () {
+  const path = '/api/v1/server/logs'
+  let server: ServerInfo
+  let userAccessToken = ''
+
+  // ---------------------------------------------------------------
+
+  before(async function () {
+    this.timeout(120000)
+
+    await flushTests()
+
+    server = await runServer(1)
+
+    await setAccessTokensToServers([ server ])
+
+    const user = {
+      username: 'user1',
+      password: 'my super password'
+    }
+    await createUser(server.url, server.accessToken, user.username, user.password)
+    userAccessToken = await userLogin(server, user)
+  })
+
+  describe('When getting logs', function () {
+
+    it('Should fail with a non authenticated user', async function () {
+      await makeGetRequest({
+        url: server.url,
+        path,
+        statusCodeExpected: 401
+      })
+    })
+
+    it('Should fail with a non admin user', async function () {
+      await makeGetRequest({
+        url: server.url,
+        path,
+        token: userAccessToken,
+        statusCodeExpected: 403
+      })
+    })
+
+    it('Should fail with a missing startDate query', async function () {
+      await makeGetRequest({
+        url: server.url,
+        path,
+        token: server.accessToken,
+        statusCodeExpected: 400
+      })
+    })
+
+    it('Should fail with a bad startDate query', async function () {
+      await makeGetRequest({
+        url: server.url,
+        path,
+        token: server.accessToken,
+        query: { startDate: 'toto' },
+        statusCodeExpected: 400
+      })
+    })
+
+    it('Should fail with a bad endDate query', async function () {
+      await makeGetRequest({
+        url: server.url,
+        path,
+        token: server.accessToken,
+        query: { startDate: new Date().toISOString(), endDate: 'toto' },
+        statusCodeExpected: 400
+      })
+    })
+
+    it('Should fail with a bad level parameter', async function () {
+      await makeGetRequest({
+        url: server.url,
+        path,
+        token: server.accessToken,
+        query: { startDate: new Date().toISOString(), level: 'toto' },
+        statusCodeExpected: 400
+      })
+    })
+
+    it('Should succeed with the correct params', async function () {
+      await makeGetRequest({
+        url: server.url,
+        path,
+        token: server.accessToken,
+        query: { startDate: new Date().toISOString() },
+        statusCodeExpected: 200
+      })
+    })
+  })
+
+  after(async function () {
+    killallServers([ server ])
+
+    // Keep the logs if the test failed
+    if (this['ok']) {
+      await flushTests()
+    }
+  })
+})
@@ -6,6 +6,7 @@ import './follows'
 import './follows-moderation'
 import './handle-down'
 import './jobs'
+import './logs'
 import './reverse-proxy'
 import './stats'
 import './tracker'
server/tests/api/server/logs.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
+/* tslint:disable:no-unused-expression */
+
+import * as chai from 'chai'
+import 'mocha'
+import { flushTests, killallServers, runServer, ServerInfo, setAccessTokensToServers } from '../../../../shared/utils/index'
+import { waitJobs } from '../../../../shared/utils/server/jobs'
+import { uploadVideo } from '../../../../shared/utils/videos/videos'
+import { getLogs } from '../../../../shared/utils/logs/logs'
+
+const expect = chai.expect
+
+describe('Test logs', function () {
+  let server: ServerInfo
+
+  before(async function () {
+    this.timeout(30000)
+
+    await flushTests()
+
+    server = await runServer(1)
+    await setAccessTokensToServers([ server ])
+  })
+
+  it('Should get logs with a start date', async function () {
+    this.timeout(10000)
+
+    await uploadVideo(server.url, server.accessToken, { name: 'video 1' })
+    await waitJobs([ server ])
+
+    const now = new Date()
+
+    await uploadVideo(server.url, server.accessToken, { name: 'video 2' })
+    await waitJobs([ server ])
+
+    const res = await getLogs(server.url, server.accessToken, now)
+    const logsString = JSON.stringify(res.body)
+
+    expect(logsString.includes('video 1')).to.be.false
+    expect(logsString.includes('video 2')).to.be.true
+  })
+
+  it('Should get logs with an end date', async function () {
+    this.timeout(10000)
+
+    await uploadVideo(server.url, server.accessToken, { name: 'video 3' })
+    await waitJobs([ server ])
+
+    const now1 = new Date()
+
+    await uploadVideo(server.url, server.accessToken, { name: 'video 4' })
+    await waitJobs([ server ])
+
+    const now2 = new Date()
+
+    await uploadVideo(server.url, server.accessToken, { name: 'video 5' })
+    await waitJobs([ server ])
+
+    const res = await getLogs(server.url, server.accessToken, now1, now2)
+    const logsString = JSON.stringify(res.body)
+
+    expect(logsString.includes('video 3')).to.be.false
+    expect(logsString.includes('video 4')).to.be.true
+    expect(logsString.includes('video 5')).to.be.false
+  })
+
+  it('Should get filter by level', async function () {
+    this.timeout(10000)
+
+    const now = new Date()
+
+    await uploadVideo(server.url, server.accessToken, { name: 'video 6' })
+    await waitJobs([ server ])
+
+    {
+      const res = await getLogs(server.url, server.accessToken, now, undefined, 'info')
+      const logsString = JSON.stringify(res.body)
+
+      expect(logsString.includes('video 6')).to.be.true
+    }
+
+    {
+      const res = await getLogs(server.url, server.accessToken, now, undefined, 'warn')
+      const logsString = JSON.stringify(res.body)
+
+      expect(logsString.includes('video 6')).to.be.false
+    }
+  })
+
+  after(async function () {
+    killallServers([ server ])
+  })
+})
shared/models/server/log-level.type.ts (new file, 1 line)
@@ -0,0 +1 @@
+export type LogLevel = 'debug' | 'info' | 'warn' | 'error'
@@ -5,6 +5,8 @@ export enum UserRight {
   MANAGE_SERVER_FOLLOW,

+  MANAGE_LOGS,
+
   MANAGE_SERVER_REDUNDANCY,

   MANAGE_VIDEO_ABUSES,
shared/utils/logs/logs.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
+// Thanks: https://stackoverflow.com/a/37014317
+import { stat } from 'fs-extra'
+import { makeGetRequest } from '../requests/requests'
+import { LogLevel } from '../../models/server/log-level.type'
+
+async function mtimeSortFilesDesc (files: string[], basePath: string) {
+  const promises = []
+  const out: { file: string, mtime: number }[] = []
+
+  for (const file of files) {
+    const p = stat(basePath + '/' + file)
+      .then(stats => {
+        if (stats.isFile()) out.push({ file, mtime: stats.mtime.getTime() })
+      })
+
+    promises.push(p)
+  }
+
+  await Promise.all(promises)
+
+  out.sort((a, b) => b.mtime - a.mtime)
+
+  return out
+}
+
+function getLogs (url: string, accessToken: string, startDate: Date, endDate?: Date, level?: LogLevel) {
+  const path = '/api/v1/server/logs'
+
+  return makeGetRequest({
+    url,
+    path,
+    token: accessToken,
+    query: { startDate, endDate, level },
+    statusCodeExpected: 200
+  })
+}
+
+export {
+  mtimeSortFilesDesc,
+  getLogs
+}
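As a usage note, getLogs above is the helper the new test suites rely on; under the same assumptions as those tests (a server started with runServer and an admin access token already set), fetching the last minute of error-level output would look like:

    // Assumes `server` comes from runServer()/setAccessTokensToServers() as in the tests above.
    const oneMinuteAgo = new Date(Date.now() - 60 * 1000)
    const res = await getLogs(server.url, server.accessToken, oneMinuteAgo, undefined, 'error')

    console.log(res.body) // filtered output returned by GET /api/v1/server/logs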