peertube/server/helpers/ffmpeg/ffmpeg-live.ts

import { FfmpegCommand, FilterSpecification } from 'fluent-ffmpeg'
import { join } from 'path'
import { VIDEO_LIVE } from '@server/initializers/constants'
import { AvailableEncoders, LiveVideoLatencyMode } from '@shared/models'
import { logger, loggerTagsFactory } from '../logger'
import { buildStreamSuffix, getFFmpeg, getScaleFilter, StreamType } from './ffmpeg-commons'
import { getEncoderBuilderResult } from './ffmpeg-encoders'
import { addDefaultEncoderGlobalParams, addDefaultEncoderParams, applyEncoderOptions } from './ffmpeg-presets'
import { computeFPS } from './ffprobe-utils'
const lTags = loggerTagsFactory('ffmpeg')
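
// Builds the ffmpeg command that transcodes a live input into one HLS rendition per requested resolution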
async function getLiveTranscodingCommand (options: {
  inputUrl: string
  outPath: string
  masterPlaylistName: string
  latencyMode: LiveVideoLatencyMode
  resolutions: number[]

  // Input information
  fps: number
  bitrate: number
  ratio: number
  hasAudio: boolean
  availableEncoders: AvailableEncoders
  profile: string
}) {
  const {
    inputUrl,
    outPath,
    resolutions,
    fps,
    bitrate,
    availableEncoders,
    profile,
    masterPlaylistName,
    ratio,
    latencyMode,
    hasAudio
  } = options
  const command = getFFmpeg(inputUrl, 'live')

  const varStreamMap: string[] = []
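
  // Split the source video stream once, creating one intermediate stream per target resolution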
  const complexFilter: FilterSpecification[] = [
    {
      inputs: '[v:0]',
      filter: 'split',
      options: resolutions.length,
      outputs: resolutions.map(r => `vtemp${r}`)
    }
  ]
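
  // Disable scene-change keyframe insertion so keyframes land only on the regular GOP interval,
  // keeping segment boundaries aligned across renditions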
  command.outputOption('-sc_threshold 0')

  addDefaultEncoderGlobalParams(command)
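
  // Build encoder options, stream mappings and scale filters for each target resolution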
  for (let i = 0; i < resolutions.length; i++) {
    const streamMap: string[] = []
    const resolution = resolutions[i]
    const resolutionFPS = computeFPS(fps, resolution)

    const baseEncoderBuilderParams = {
      input: inputUrl,
      availableEncoders,
      profile,
      canCopyAudio: true,
      canCopyVideo: true,
      inputBitrate: bitrate,
      inputRatio: ratio,
      resolution,
      fps: resolutionFPS,
      streamNum: i,
      videoType: 'live' as 'live'
    }
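
    // Video rendition: pick an encoder, map the scaled stream and register its scale filter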
    {
      const streamType: StreamType = 'video'
      const builderResult = await getEncoderBuilderResult({ ...baseEncoderBuilderParams, streamType })
      if (!builderResult) {
        throw new Error('No available live video encoder found')
      }

      command.outputOption(`-map [vout${resolution}]`)

      addDefaultEncoderParams({ command, encoder: builderResult.encoder, fps: resolutionFPS, streamNum: i })

      logger.debug(
        'Apply ffmpeg live video params from %s using %s profile.', builderResult.encoder, profile,
        { builderResult, fps: resolutionFPS, resolution, ...lTags() }
      )

      command.outputOption(`${buildStreamSuffix('-c:v', i)} ${builderResult.encoder}`)
      applyEncoderOptions(command, builderResult.result)

      complexFilter.push({
        inputs: `vtemp${resolution}`,
        filter: getScaleFilter(builderResult.result),
        options: `w=-2:h=${resolution}`,
        outputs: `vout${resolution}`
      })

      streamMap.push(`v:${i}`)
    }
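
    // Audio rendition: reuse the input's first audio track for this variant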
    if (hasAudio) {
      const streamType: StreamType = 'audio'
      const builderResult = await getEncoderBuilderResult({ ...baseEncoderBuilderParams, streamType })
      if (!builderResult) {
        throw new Error('No available live audio encoder found')
      }

      command.outputOption('-map a:0')

      addDefaultEncoderParams({ command, encoder: builderResult.encoder, fps: resolutionFPS, streamNum: i })

      logger.debug(
        'Apply ffmpeg live audio params from %s using %s profile.', builderResult.encoder, profile,
        { builderResult, fps: resolutionFPS, resolution, ...lTags() }
      )

      command.outputOption(`${buildStreamSuffix('-c:a', i)} ${builderResult.encoder}`)
      applyEncoderOptions(command, builderResult.result)

      streamMap.push(`a:${i}`)
    }

    varStreamMap.push(streamMap.join(','))
  }

  command.complexFilter(complexFilter)

  addDefaultLiveHLSParams({ command, outPath, masterPlaylistName, latencyMode })
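
  // Tell the HLS muxer how to group the mapped streams into one variant playlist per resolution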
  command.outputOption('-var_stream_map', varStreamMap.join(' '))

  return command
}
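
// Builds a command that repackages the live input into HLS without re-encoding (stream copy)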
function getLiveMuxingCommand (options: {
  inputUrl: string
  outPath: string
  masterPlaylistName: string
  latencyMode: LiveVideoLatencyMode
}) {
  const { inputUrl, outPath, masterPlaylistName, latencyMode } = options
  const command = getFFmpeg(inputUrl, 'live')
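
  // Copy the incoming codecs untouched; the trailing '?' makes each mapping optional if the stream is absent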
  command.outputOption('-c:v copy')
  command.outputOption('-c:a copy')
  command.outputOption('-map 0:a?')
  command.outputOption('-map 0:v?')

  addDefaultLiveHLSParams({ command, outPath, masterPlaylistName, latencyMode })

  return command
}
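
// HLS segment duration depends on the configured live latency mode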
function getLiveSegmentTime (latencyMode: LiveVideoLatencyMode) {
  if (latencyMode === LiveVideoLatencyMode.SMALL_LATENCY) {
    return VIDEO_LIVE.SEGMENT_TIME_SECONDS.SMALL_LATENCY
  }

  return VIDEO_LIVE.SEGMENT_TIME_SECONDS.DEFAULT_LATENCY
}
// ---------------------------------------------------------------------------
export {
  getLiveSegmentTime,
  getLiveTranscodingCommand,
  getLiveMuxingCommand
}
// ---------------------------------------------------------------------------
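
// Shared HLS output options used by both the transcoding and the muxing-only commands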
function addDefaultLiveHLSParams (options: {
  command: FfmpegCommand
  outPath: string
  masterPlaylistName: string
  latencyMode: LiveVideoLatencyMode
}) {
  const { command, outPath, masterPlaylistName, latencyMode } = options

  command.outputOption('-hls_time ' + getLiveSegmentTime(latencyMode))
  command.outputOption('-hls_list_size ' + VIDEO_LIVE.SEGMENTS_LIST_SIZE)
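
  // delete_segments drops segments once they leave the playlist, independent_segments adds the
  // EXT-X-INDEPENDENT-SEGMENTS tag, program_date_time writes EXT-X-PROGRAM-DATE-TIME entries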
  command.outputOption('-hls_flags delete_segments+independent_segments+program_date_time')
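
  // '%v' expands to the variant (stream group) name, '%06d' to the segment sequence number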
  command.outputOption(`-hls_segment_filename ${join(outPath, '%v-%06d.ts')}`)
  command.outputOption('-master_pl_name ' + masterPlaylistName)
  command.outputOption(`-f hls`)

  command.output(join(outPath, '%v.m3u8'))
}