// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// NOTE(review): fragment — the opening of this object literal (`...push(literal({`)
// lies before the visible chunk. Fields below describe a 15 s end-vignette
// ("sluttvignett") graphics segment line item, triggered at absolute time 0.
runningOrderId: '',
name: 'sluttvignett',
trigger: {
type: TriggerType.TIME_ABSOLUTE,
value: 0
},
status: RundownAPI.LineItemStatusCode.UNKNOWN,
sourceLayerId: SourceLayers.graphics0,
outputLayerId: 'pgm0',
expectedDuration: 15 * 1000,
content: {
timelineObjects: _.compact([
// utvignett to 0db
literal({
_id: IDs.lawo_effect, deviceId: [''], siId: '', roId: '',
trigger: { type: TriggerType.TIME_ABSOLUTE, value: 0 },
priority: 1,
duration: 0,
LLayer: LLayers.lawo_source_effect,
content: {
type: TimelineContentTypeLawo.SOURCE,
attributes: {
// Drive the Lawo fader to 0 (presumably dB unity — confirm against mixer docs).
'Fader/Motor dB Value': {
value: 0,
}
}
}
}),
// @todo graphics template (on gfx1?)
// play utvignett
// NOTE(review): splice — the next line belongs to a different context (an IDs map
// built with context.getHashId); the intervening code was lost in extraction.
lawo_layer2: context.getHashId('lawo_layer2'),
}
// NOTE(review): fragment of a split/DVE builder — `left`/`right` come from the
// enclosing (not visible) function.
const lawoLayer1 = inputToLawoSource(left)
const lawoLayer2 = inputToLawoSource(right)
// True when either side is the automix source — presumably used to open the host mic; confirm.
const lawoHost = isAutomixAudio(left) || isAutomixAudio(right)
// NOTE(review): `Array` is missing its type argument — presumably
// `Array<SegmentLineItemOptional>` lost in extraction; confirm against the original file.
let segmentLineItems: Array = []
segmentLineItems.push(literal({
_id: context.getHashId('split'),
mosId: '',
segmentLineId: '',
runningOrderId: '',
name: 'SPLIT',
trigger: {
type: TriggerType.TIME_ABSOLUTE,
value: 0
},
status: RundownAPI.LineItemStatusCode.UNKNOWN,
sourceLayerId: SourceLayers.camera0,
outputLayerId: 'pgm0',
// Duration from MOS metadata: prefer Actual, fall back to Estimated, else 0
// (value × 1000 — presumably seconds to milliseconds; confirm).
expectedDuration: ( // @todo rewrite this
story.getValueByPath('MosExternalMetaData.0.MosPayload.Actual') ||
story.getValueByPath('MosExternalMetaData.0.MosPayload.Estimated') ||
0
) * 1000,
content: {
timelineObjects: _.compact([
// setup ssrc
literal({
_id: IDs.atemSSrc, deviceId: [''], siId: '', roId: '',
trigger: { type: TriggerType.TIME_ABSOLUTE, value: 0 },
// NOTE(review): splice — this literal is cut off here; the next line is from a
// different (vignett variant-dispatch) code path.
context.warning('Unknown vignett mosartVariant: ' + mosartVariant)
}
// NOTE(review): fragment of a vignett (opening-title) builder; the enclosing
// function is outside this chunk.
// NOTE(review): `Array` is missing its type argument (extraction loss) — confirm.
let segmentLineItems: Array = []
// Hashed ids for the timeline objects below (presumably stable across regenerations — confirm).
let IDs = {
lawo: context.getHashId('lawo'),
vignett: context.getHashId('vignett'),
atemMe: context.getHashId('atemMe')
}
// The vignett clip as a segment line item: starts 'now', plays `clip` on the
// vignett source layer, programme output.
let video: SegmentLineItemOptional = {
_id: context.getHashId('vignett'),
mosId: 'vignett',
name: 'Vignett',
trigger: {
type: TriggerType.TIME_ABSOLUTE,
value: 'now'
},
status: RundownAPI.LineItemStatusCode.UNKNOWN,
sourceLayerId: SourceLayers.vignett,
outputLayerId: 'pgm0',
expectedDuration: segmentLineduration,
content: {
fileName: clip,
sourceDuration: sourceDuration,
timelineObjects: [
// full sound vignett
literal({
_id: IDs.lawo, deviceId: [''], siId: '', roId: '',
trigger: { type: TriggerType.TIME_ABSOLUTE, value: 0 },
priority: 1,
duration: 0,
// NOTE(review): splice — the rest of this Lawo object is missing; the lines
// below are the tail of a different trigger object.
value: 0
},
// NOTE(review): fragment — interior of another segment line item whose head was
// lost in extraction (camera0 layer, ATEM SuperSource setup).
status: RundownAPI.LineItemStatusCode.UNKNOWN,
sourceLayerId: SourceLayers.camera0,
outputLayerId: 'pgm0',
// Duration from MOS metadata: Actual, else Estimated, else 0 (× 1000).
expectedDuration: ( // @todo rewrite this
story.getValueByPath('MosExternalMetaData.0.MosPayload.Actual') ||
story.getValueByPath('MosExternalMetaData.0.MosPayload.Estimated') ||
0
) * 1000,
content: {
timelineObjects: _.compact([
// setup ssrc
literal({
_id: IDs.atemSSrc, deviceId: [''], siId: '', roId: '',
trigger: { type: TriggerType.TIME_ABSOLUTE, value: 0 },
priority: 1,
duration: 0,
LLayer: LLayers.atem_supersource_override,
content: {
type: TimelineContentTypeAtem.SSRC,
attributes: {
// SuperSource box layout: left / right inputs plus a background box.
boxes: [
literal({ // left
enabled: true,
source: left,
}),
literal({ // right
enabled: true,
source: right,
}),
literal({ // background
// NOTE(review): splice — the background box's fields are missing; the lines
// below are from a separate ATEM ME-program timeline object.
priority: 1,
duration: 0, // @todo TBD
LLayer: LLayers.atem_me_program,
content: {
type: TimelineContentTypeAtem.ME,
attributes: {
input: cameraInput,
transition: Atem_Enums.TransitionStyle.CUT
}
}
}),
// mic host hot
literal({
_id: IDs.lawo_automix, deviceId: [''], siId: '', roId: '',
trigger: { type: TriggerType.TIME_ABSOLUTE, value: 0 },
priority: 1,
duration: 0,
LLayer: LLayers.lawo_source_automix,
content: {
type: TimelineContentTypeLawo.SOURCE,
attributes: {
'Fader/Motor dB Value': {
value: 0,
// Ramp the fader over LawoFadeInDuration instead of snapping.
transitionDuration: LawoFadeInDuration,
}
}
}
}),
])
}
})
orderedItems.forEach(i => {
if (!pieceGroup || (!allowTransition && i.isTransition)) {
return
}
const item = pieceGroup.items.find(l => l._id === i._id)
if (!item || !item.content || !item.content.timelineObjects) {
return
}
// If there is a transition and this item is abs0, it is assumed to be the primary piece and so does not need lookahead
if (hasTransition && !i.isTransition && item.trigger.type === TriggerType.TIME_ABSOLUTE && item.trigger.value === 0) {
return
}
// Note: This is assuming that there is only one use of a layer in each piece.
const obj = item.content.timelineObjects.find(o => o !== null && o.LLayer === layer)
if (obj) {
res.push(obj as TimelineObjRundown)
}
})
// NOTE(review): fragment — the enclosing loop/function and the close of the
// `_.each` callback below lie outside this chunk.
if (
item.content &&
item.content.timelineObjects
) {
let tos: TimelineObjectCoreExt[] = item.content.timelineObjects
// A continuation of an infinite piece keeps its original trigger; only fresh
// abs-0 pieces are re-timed relative to the transition.
const isInfiniteContinuation = item.infiniteId && item.infiniteId !== item._id
if (item.trigger.type === TriggerType.TIME_ABSOLUTE && item.trigger.value === 0 && !isInfiniteContinuation) {
// If timed absolute and there is a transition delay, then apply delay
if (!item.isTransition && allowTransition && transition && !item.adLibSourceId) {
// Render the signed offset as "+ n" / "- n" for the relative-trigger expression.
const transitionContentsDelayStr = transitionContentsDelay < 0 ? `- ${-transitionContentsDelay}` : `+ ${transitionContentsDelay}`
item.trigger.type = TriggerType.TIME_RELATIVE
item.trigger.value = `#${getPieceGroupId(transition)}.start ${transitionContentsDelayStr}`
} else if (item.isTransition && transitionPieceDelay) {
item.trigger.type = TriggerType.TIME_ABSOLUTE
item.trigger.value = Math.max(0, transitionPieceDelay)
}
}
// create a piece group for the items and then place all of them there
const pieceGroup = createPieceGroup(item, item.durationOverride || item.duration || item.expectedDuration || 0, partGroup)
timelineObjs.push(pieceGroup)
if (!item.virtual) {
timelineObjs.push(createPieceGroupFirstObject(item, pieceGroup, firstObjClasses))
_.each(tos, (o: TimelineObjectCoreExt) => {
if (o.holdMode) {
// In hold mode, objects marked EXCEPT are dropped unless explicitly shown.
if (isHold && !showHoldExcept && o.holdMode === TimelineObjHoldMode.EXCEPT) {
return
}
}
// Graphics (Nora) setup: parse the graphics payload from the MOS story and read
// the Nora connection settings from blueprint config (with defaults).
const gfxPayload = ParseGraffikData(context, story)
const noraHost = context.getConfigValue('nora_host_control', NoraHostControlDefault)
const noraGroup = context.getConfigValue('nora_group', 'dksl')
const noraApiKey = context.getConfigValue('nora_apikey', '')
// NOTE(review): `Array` is missing its type argument (extraction loss) — confirm.
let segmentLineItems: Array = []
// Build the TLF segment line item (TLF presumably = telephone contribution — confirm).
segmentLineItems.push(literal({
_id: '',
mosId: '',
segmentLineId: '',
runningOrderId: '',
name: 'TLF',
trigger: {
type: TriggerType.TIME_ABSOLUTE,
value: 0
},
status: RundownAPI.LineItemStatusCode.UNKNOWN,
sourceLayerId: SourceLayers.graphics0,
outputLayerId: 'pgm0',
// Duration from MOS metadata: Actual, else Estimated, else 0 (× 1000).
expectedDuration: ( // @todo rewrite this
story.getValueByPath('MosExternalMetaData.0.MosPayload.Actual') ||
story.getValueByPath('MosExternalMetaData.0.MosPayload.Estimated') ||
0
) * 1000,
content: {
timelineObjects: _.compact([
// Emit the graphics object only when the story carried a payload.
(gfxPayload ?
literal({
_id: IDs.gfxPost, deviceId: [''], siId: '', roId: '',
trigger: { type: TriggerType.TIME_ABSOLUTE, value: 0 },
// NOTE(review): splice — the rest of this literal is missing; the lines below
// are the interior of a different (RM/remote-source) item.
type: TriggerType.TIME_ABSOLUTE,
value: 0
},
status: RundownAPI.LineItemStatusCode.UNKNOWN,
sourceLayerId: SourceLayers.camera0,
outputLayerId: 'pgm0',
expectedDuration: ( // @todo rewrite this
story.getValueByPath('MosExternalMetaData.0.MosPayload.Actual') ||
story.getValueByPath('MosExternalMetaData.0.MosPayload.Estimated') ||
0
) * 1000,
content: {
timelineObjects: _.compact([
literal({
_id: IDs.atemSrv1, deviceId: [''], siId: '', roId: '',
trigger: { type: TriggerType.TIME_ABSOLUTE, value: 0 },
priority: 1,
duration: 0,
LLayer: LLayers.atem_me_program,
content: {
type: TimelineContentTypeAtem.ME,
attributes: {
// Select the RM (remote) ATEM input matching this variant number.
input: RMFirstInput + variant - 1,
transition: Atem_Enums.TransitionStyle.CUT
}
}
}),
// mic hot
literal({
_id: IDs.lawo_automix, deviceId: [''], siId: '', roId: '',
trigger: { type: TriggerType.TIME_ABSOLUTE, value: 0 },
// NOTE(review): fragment truncated here.
objs.forEach(o => {
if (o.trigger.type === TriggerType.TIME_ABSOLUTE && (o.trigger.value === 0 || o.trigger.value === 'now')) {
o.trigger.value = 100
}
})
// Resolve the transformed objects into a concrete timeline window
// (tlResolved is presumably consumed past this chunk — confirm).
const tlResolved = Resolver.getTimelineInWindow(transformTimeline(objs))