#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# This file is part of ffplayout.
#
# ffplayout is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ffplayout is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ffplayout. If not, see <http://www.gnu.org/licenses/>.

# ------------------------------------------------------------------------------


import configparser
import glob
import json
import logging
import math
import os
import random
import signal
import smtplib
import socket
import ssl
import sys
import time
from argparse import ArgumentParser
from datetime import date, datetime, timedelta
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate
from logging.handlers import TimedRotatingFileHandler
from shutil import copyfileobj
from subprocess import PIPE, CalledProcessError, Popen, check_output
from threading import Thread
from types import SimpleNamespace
from urllib import request

# ------------------------------------------------------------------------------
# argument parsing
# ------------------------------------------------------------------------------

stdin_parser = ArgumentParser(
    description='python and ffmpeg based playout',
    epilog="don't use parameters if you want to use these settings from config")

stdin_parser.add_argument(
    '-c', '--config', help='file path to ffplayout.conf'
)

stdin_parser.add_argument(
    '-d', '--desktop', help='preview on desktop', action='store_true'
)

stdin_parser.add_argument(
    '-f', '--folder', help='play folder content'
)

stdin_parser.add_argument(
    '-l', '--log', help='file path for logfile'
)

stdin_parser.add_argument(
    '-p', '--playlist', help='path to playlist'
)

stdin_args = stdin_parser.parse_args()

# ------------------------------------------------------------------------------
# read variables from config file
# ------------------------------------------------------------------------------

# read config
cfg = configparser.ConfigParser()

if stdin_args.config:
    cfg.read(stdin_args.config)
elif os.path.isfile('/etc/ffplayout/ffplayout.conf'):
    cfg.read('/etc/ffplayout/ffplayout.conf')
else:
    cfg.read('ffplayout.conf')

_general = SimpleNamespace(
    stop=cfg.getboolean('GENERAL', 'stop_on_error'),
    threshold=cfg.getfloat('GENERAL', 'stop_threshold')
)

_mail = SimpleNamespace(
    subject=cfg.get('MAIL', 'subject'),
    server=cfg.get('MAIL', 'smpt_server'),
    port=cfg.getint('MAIL', 'smpt_port'),
    s_addr=cfg.get('MAIL', 'sender_addr'),
    s_pass=cfg.get('MAIL', 'sender_pass'),
    recip=cfg.get('MAIL', 'recipient'),
    level=cfg.get('MAIL', 'mail_level')
)

_log = SimpleNamespace(
    to_file=cfg.getboolean('LOGGING', 'log_to_file'),
    path=cfg.get('LOGGING', 'log_file'),
    level=cfg.get('LOGGING', 'log_level')
)

_pre_comp = SimpleNamespace(
    w=cfg.getint('PRE_COMPRESS', 'width'),
    h=cfg.getint('PRE_COMPRESS', 'height'),
    aspect=cfg.getfloat('PRE_COMPRESS', 'aspect'),
    fps=cfg.getint('PRE_COMPRESS', 'fps'),
    v_bitrate=cfg.getint('PRE_COMPRESS', 'width') * 50,
    v_bufsize=cfg.getint('PRE_COMPRESS', 'width') * 50 / 2,
    logo=cfg.get('PRE_COMPRESS', 'logo'),
    opacity=cfg.get('PRE_COMPRESS', 'logo_opacity'),
    logo_filter=cfg.get('PRE_COMPRESS', 'logo_filter'),
    add_loudnorm=cfg.getboolean('PRE_COMPRESS', 'add_loudnorm'),
    loud_i=cfg.getfloat('PRE_COMPRESS', 'loud_I'),
    loud_tp=cfg.getfloat('PRE_COMPRESS', 'loud_TP'),
    loud_lra=cfg.getfloat('PRE_COMPRESS', 'loud_LRA'),
    protocols=cfg.get('PRE_COMPRESS', 'live_protocols')
)

stime = cfg.get('PLAYLIST', 'day_start').split(':')

if stime[0] and stime[1] and stime[2]:
    start_t = float(stime[0]) * 3600 + float(stime[1]) * 60 + float(stime[2])
else:
    start_t = None

_playlist = SimpleNamespace(
    mode=cfg.getboolean('PLAYLIST', 'playlist_mode'),
    path=cfg.get('PLAYLIST', 'path'),
    start=start_t
)

_storage = SimpleNamespace(
    path=cfg.get('STORAGE', 'path'),
    filler=cfg.get('STORAGE', 'filler_clip'),
    extensions=json.loads(cfg.get('STORAGE', 'extensions')),
    shuffle=cfg.getboolean('STORAGE', 'shuffle')
)

_text = SimpleNamespace(
    textfile=cfg.get('TEXT', 'textfile'),
    fontsize=cfg.get('TEXT', 'fontsize'),
    fontcolor=cfg.get('TEXT', 'fontcolor'),
    fontfile=cfg.get('TEXT', 'fontfile'),
    box=cfg.get('TEXT', 'box'),
    boxcolor=cfg.get('TEXT', 'boxcolor'),
    boxborderw=cfg.get('TEXT', 'boxborderw'),
    x=cfg.get('TEXT', 'x'),
    y=cfg.get('TEXT', 'y')
)

_playout = SimpleNamespace(
    preview=cfg.getboolean('OUT', 'preview'),
    name=cfg.get('OUT', 'service_name'),
    provider=cfg.get('OUT', 'service_provider'),
    out_addr=cfg.get('OUT', 'out_addr'),
    post_comp_video=json.loads(cfg.get('OUT', 'post_comp_video')),
    post_comp_audio=json.loads(cfg.get('OUT', 'post_comp_audio')),
    post_comp_extra=json.loads(cfg.get('OUT', 'post_comp_extra'))
)

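# For orientation, the keys read above come from an ffplayout.conf laid out
# roughly like the sketch below (section and key names are the ones used in
# the cfg.get*() calls, the values are only illustrative):
#
#   [GENERAL]
#   stop_on_error = True
#   stop_threshold = 11
#
#   [PRE_COMPRESS]
#   width = 1024
#   height = 576
#   aspect = 1.778
#   fps = 25
#
#   [PLAYLIST]
#   playlist_mode = True
#   day_start = 06:00:00
#
# The MAIL, LOGGING, STORAGE, TEXT and OUT sections follow the same pattern.
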
# ------------------------------------------------------------------------------
# logging
# ------------------------------------------------------------------------------

# If the log file is specified on the command line then override the default
if stdin_args.log:
    _log.path = stdin_args.log

logger = logging.getLogger(__name__)
logger.setLevel(_log.level)
handler = TimedRotatingFileHandler(_log.path, when='midnight', backupCount=5)
formatter = logging.Formatter('[%(asctime)s] [%(levelname)s] %(message)s')
handler.setFormatter(formatter)

if _log.to_file:
    logger.addHandler(handler)
else:
    logger.addHandler(logging.StreamHandler())


class PlayoutLogger(object):
    """
    capture stdout and stderr in the log
    """

    def __init__(self, logger, level):
        self.logger = logger
        self.level = level

    def write(self, message):
        # Only log if there is a message (not just a new line)
        if message.rstrip() != '':
            self.logger.log(self.level, message.rstrip())

    def flush(self):
        pass


# Replace stdout with logging to file at INFO level
sys.stdout = PlayoutLogger(logger, logging.INFO)
# Replace stderr with logging to file at ERROR level
sys.stderr = PlayoutLogger(logger, logging.ERROR)

# ------------------------------------------------------------------------------
# mail sender
# ------------------------------------------------------------------------------

class Mailer:
    """
    mailer class for log messages, with level selector
    """

    def __init__(self):
        self.level = _mail.level
        self.time = None

    def current_time(self):
        self.time = get_time(None)

    def send_mail(self, msg):
        if _mail.recip:
            self.current_time()

            message = MIMEMultipart()
            message['From'] = _mail.s_addr
            message['To'] = _mail.recip
            message['Subject'] = _mail.subject
            message['Date'] = formatdate(localtime=True)
            message.attach(MIMEText('{} {}'.format(self.time, msg), 'plain'))
            text = message.as_string()

            try:
                server = smtplib.SMTP(_mail.server, _mail.port)
            except socket.error as err:
                logger.error(err)
                server = None

            if server is not None:
                server.starttls()
                try:
                    login = server.login(_mail.s_addr, _mail.s_pass)
                except smtplib.SMTPAuthenticationError as serr:
                    logger.error(serr)
                    login = None

                if login is not None:
                    server.sendmail(_mail.s_addr, _mail.recip, text)
                    server.quit()

    def info(self, msg):
        if self.level in ['INFO']:
            self.send_mail(msg)

    def warning(self, msg):
        if self.level in ['INFO', 'WARNING']:
            self.send_mail(msg)

    def error(self, msg):
        if self.level in ['INFO', 'WARNING', 'ERROR']:
            self.send_mail(msg)


mailer = Mailer()

# ------------------------------------------------------------------------------
# probe media infos
# ------------------------------------------------------------------------------

class MediaProbe:
    """
    get info about a media file, similar to mediainfo
    """

    def load(self, file):
        self.format = None
        self.audio = []
        self.video = []

        cmd = ['ffprobe', '-v', 'quiet', '-print_format',
               'json', '-show_format', '-show_streams', file]

        info = json.loads(check_output(cmd).decode(encoding='UTF-8'))

        self.format = info['format']

        for stream in info['streams']:
            if stream['codec_type'] == 'audio':
                self.audio.append(stream)

            if stream['codec_type'] == 'video':
                if 'display_aspect_ratio' not in stream:
                    stream['aspect'] = float(
                        stream['width']) / float(stream['height'])
                else:
                    w, h = stream['display_aspect_ratio'].split(':')
                    stream['aspect'] = float(w) / float(h)

                a, b = stream['r_frame_rate'].split('/')
                stream['fps'] = float(a) / float(b)

                self.video.append(stream)

# ------------------------------------------------------------------------------
# global helper functions
# ------------------------------------------------------------------------------

def handle_sigterm(sig, frame):
    """
    handler for the SIGTERM signal
    """
    raise(SystemExit)


signal.signal(signal.SIGTERM, handle_sigterm)


def terminate_processes(decoder, encoder, watcher):
    """
    kill orphaned processes
    """
    if decoder.poll() is None:
        decoder.terminate()

    if encoder.poll() is None:
        encoder.terminate()

    if watcher:
        watcher.stop()


def get_time(time_format):
    """
    get different time formats:
        - full_sec > current time in seconds
        - stamp > current date time in seconds
        - else > current time in HH:MM:SS
    """
    t = datetime.today()

    if time_format == 'full_sec':
        return t.hour * 3600 + t.minute * 60 + t.second \
            + t.microsecond / 1000000
    elif time_format == 'stamp':
        return float(datetime.now().timestamp())
    else:
        return t.strftime('%H:%M:%S')


def get_date(seek_day):
    """
    get the date for the correct playlist;
    when _playlist.start and seek_day are set,
    check if the playlist date must be from yesterday
    """
    d = date.today()
    if _playlist.start and seek_day and get_time('full_sec') < _playlist.start:
        yesterday = d - timedelta(1)
        return yesterday.strftime('%Y-%m-%d')
    else:
        return d.strftime('%Y-%m-%d')

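# Illustrative example of the day_start handling: with day_start = 06:00:00
# the config code above sets _playlist.start to 21600.0. At 05:30 local time
# get_time('full_sec') returns 19800.0 (plus microseconds), which is below
# _playlist.start, so get_date(True) still returns yesterday's date and the
# playlist of the previous broadcast day keeps playing.
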
def is_float(value):
    """
    test if value is float
    """
    try:
        float(value)
        return True
    except (ValueError, TypeError):
        return False


def is_int(value):
    """
    test if value is int
    """
    try:
        int(value)
        return True
    except ValueError:
        return False


def valid_json(file):
    """
    simple json validation
    """
    try:
        json_object = json.load(file)
        return json_object
    except ValueError:
        logger.error("Playlist {} is not JSON conform".format(file))
        return None

def check_sync(begin, encoder):
    """
    compare clip play time with real time,
    to see if we are in sync
    """
    time_now = get_time('full_sec')

    time_distance = begin - time_now
    if _playlist.start and 0 <= time_now < _playlist.start and \
            not begin == _playlist.start:
        time_distance -= 86400.0

    # check that we are in tolerance time
    if _general.stop and abs(time_distance) > _general.threshold:
        mailer.error(
            'Sync tolerance value exceeded with {0:.2f} seconds,\n'
            'program terminated!'.format(time_distance))
        logger.error(
            ('Sync tolerance value exceeded with '
             '{0:.2f} seconds, program terminated!').format(time_distance)
        )
        encoder.terminate()
        sys.exit(1)

def check_length(json_nodes, total_play_time):
    """
    check if playlist is long enough
    """
    if 'length' in json_nodes:
        l_h, l_m, l_s = json_nodes["length"].split(':')
        if is_float(l_h) and is_float(l_m) and is_float(l_s):
            length = float(l_h) * 3600 + float(l_m) * 60 + float(l_s)

            if 'date' in json_nodes:
                date = json_nodes["date"]
            else:
                date = get_date(True)

            if total_play_time < length - 5:
                mailer.error(
                    'Playlist ({}) is not long enough!\n'
                    'total play time is: {}'.format(
                        date,
                        timedelta(seconds=total_play_time))
                )
                logger.error('Playlist is only {} hours long!'.format(
                    timedelta(seconds=total_play_time)))

def validate_thread(clip_nodes):
    """
    validate json values in new thread
    and test if source paths exist
    """
    def check_json(json_nodes):
        error = ''
        counter = 0

        # check if all values are valid
        for node in json_nodes["program"]:
            source = node["source"]
            prefix = source.split('://')[0]
            missing = []

            if source and prefix in _pre_comp.protocols:
                cmd = [
                    'ffprobe', '-v', 'error',
                    '-show_entries', 'format=duration',
                    '-of', 'default=noprint_wrappers=1:nokey=1', source]

                try:
                    output = check_output(cmd).decode('utf-8')
                except CalledProcessError:
                    output = '404'

                if '404' in output:
                    missing.append('Stream not exist: "{}"'.format(source))
            elif not os.path.isfile(source):
                missing.append('File not exist: "{}"'.format(source))

            if is_float(node["in"]) and is_float(node["out"]):
                counter += node["out"] - node["in"]
            else:
                missing.append('Missing Value in: "{}"'.format(node))

            if not is_float(node["duration"]):
                missing.append('No duration Value!')

            line = '\n'.join(missing)
            if line:
                logger.error('Validation error :: {}'.format(line))
                error += line + '\nIn line: {}\n\n'.format(node)

        if error:
            mailer.error(
                'Validation error, check JSON playlist, '
                'values are missing:\n{}'.format(error)
            )

        check_length(json_nodes, counter)

    validate = Thread(name='check_json', target=check_json, args=(clip_nodes,))
    validate.daemon = True
    validate.start()

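# The validator above expects a playlist of roughly this shape (the key names
# are the ones looked up in the code, the values are only illustrative):
#
#   {
#       "date": "2019-10-15",
#       "length": "24:00:00",
#       "program": [
#           {"in": 0, "out": 300.0, "duration": 300.0,
#            "source": "/media/clip.mp4", "category": ""}
#       ]
#   }
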
def seek_in(seek):
    """
    seek in clip
    """
    if seek > 0.0:
        return ['-ss', str(seek)]
    else:
        return []


def set_length(duration, seek, out):
    """
    set new clip length
    """
    if out < duration:
        return ['-t', str(out - seek)]
    else:
        return []


def loop_input(source, src_duration, target_duration):
    # loop file n times
    loop_count = math.ceil(target_duration / src_duration)
    logger.info(
        'Loop "{0}" {1} times, total duration: {2:.2f}'.format(
            source, loop_count, target_duration))
    return ['-stream_loop', str(loop_count),
            '-i', source, '-t', str(target_duration)]

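# Worked example for loop_input(): a 30 second filler that has to cover
# 100 seconds gives math.ceil(100 / 30) = 4, so the returned arguments are
# ['-stream_loop', '4', '-i', source, '-t', '100'].
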
def gen_dummy(duration):
    """
    generate a dummy clip, with black color and empty audiotrack
    """
    color = '#121212'
    # IDEA: adding noise could be a config option
    # noise = 'noise=alls=50:allf=t+u,hue=s=0'
    return [
        '-f', 'lavfi', '-i',
        'color=c={}:s={}x{}:d={}:r={},format=pix_fmts=yuv420p'.format(
            color, _pre_comp.w, _pre_comp.h, duration, _pre_comp.fps
        ),
        '-f', 'lavfi', '-i', 'anoisesrc=d={}:c=pink:r=48000:a=0.05'.format(
            duration)
    ]

def gen_filler(duration):
    """
    when playlist is not 24 hours long, we generate a loop from filler clip
    """
    if not _storage.filler:
        # when no filler is set, generate a dummy
        logger.warning('No filler is set!')
        return gen_dummy(duration)
    else:
        # get duration from filler
        cmd = [
            'ffprobe', '-v', 'error', '-show_entries', 'format=duration',
            '-of', 'default=noprint_wrappers=1:nokey=1', _storage.filler]

        try:
            file_duration = float(check_output(cmd).decode('utf-8'))
        except (CalledProcessError, ValueError):
            file_duration = None

        if file_duration:
            if file_duration > duration:
                # cut filler
                logger.info(
                    'Generate filler with {0:.2f} seconds'.format(duration))
                return ['-i', _storage.filler] + set_length(
                    file_duration, 0, duration)
            else:
                # loop file n times
                return loop_input(_storage.filler, file_duration, duration)
        else:
            logger.error("Can't get filler length, generate dummy!")
            return gen_dummy(duration)

def src_or_dummy(src, dur, seek, out):
    """
    when the source path exists, generate input with seek and out time;
    when the path does not exist, generate a dummy clip
    """
    prefix = src.split('://')[0]

    # check if input is a live source
    if src and prefix in _pre_comp.protocols or os.path.isfile(src):
        if out > dur:
            return loop_input(src, dur, out)
        else:
            return seek_in(seek) + ['-i', src] + set_length(dur, seek, out)
    else:
        mailer.error('Clip not exist:\n{}'.format(src))
        logger.error('Clip not exist: {}'.format(src))
        return gen_dummy(out - seek)

def gen_input(src, begin, dur, seek, out, last):
    """
    prepare input clip:
    check begin and length of the clip,
    return the clip only if we are in the 24 hours time range
    """
    day_in_sec = 86400.0
    ref_time = day_in_sec
    current_time = get_time('full_sec')

    if _playlist.start:
        ref_time = day_in_sec + _playlist.start

        if 0 <= current_time < _playlist.start:
            current_time += day_in_sec

    # calculate time difference to see if we are sync
    time_diff = out - seek + current_time

    if ((time_diff <= ref_time or begin < day_in_sec) and not last) \
            or not _playlist.start:
        # when we are in the 24 hour range, get the clip
        return src_or_dummy(src, dur, seek, out), None
    elif time_diff < ref_time and last:
        # when the last clip is passed and we still have too much time left,
        # check if duration is larger than out - seek
        time_diff = dur + current_time
        new_len = dur - (time_diff - ref_time)

        if time_diff >= ref_time:
            logger.info('we are under time, new_len is: {}'.format(new_len))
            src_cmd = src_or_dummy(src, dur, 0, new_len)
        else:
            src_cmd = src_or_dummy(src, dur, 0, dur)

            mailer.error(
                'Playlist is not long enough:\n{} seconds needed.'.format(
                    new_len))
            logger.error('Playlist is {} seconds to short'.format(new_len))

        return src_cmd, new_len - dur

    elif time_diff > ref_time:
        new_len = out - seek - (time_diff - ref_time)
        # when we are over the 24 hours range, trim the clip
        logger.info('we are over time, new_len is: {}'.format(new_len))

        # When the calculated length from the last clip is longer than
        # 5 seconds we use the clip, when the length is between 1 and
        # 5 seconds we generate a black clip, and when it is less than
        # a second we skip the clip.
        if new_len > 5.0:
            src_cmd = src_or_dummy(src, dur, seek, new_len)
        elif new_len > 1.0:
            src_cmd = gen_dummy(new_len)
        else:
            src_cmd = None

        return src_cmd, 0.0

    else:
        return None, 0.0

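# Note on the gen_input() contract as used by GetSourceIter below: it returns
# a tuple (src_cmd, time_left), where time_left is None for a normal clip
# inside the 24 hour window, greater than zero when the playlist ends too
# early (the caller then pads with filler or a dummy clip), and 0.0 when the
# clip had to be trimmed or skipped at the end of the window.
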
# ------------------------------------------------------------------------------
# building filters,
# when needed add individual filters to match the output format
# ------------------------------------------------------------------------------

def deinterlace_filter(probe):
    """
    when the material is interlaced,
    set the deinterlacing filter
    """
    filter_chain = []

    if 'field_order' in probe.video[0] and \
            probe.video[0]['field_order'] != 'progressive':
        filter_chain.append('yadif=0:-1:0')

    return filter_chain


def pad_filter(probe):
    """
    if source and target aspect ratio are different,
    fix it with pillarbox or letterbox
    """
    filter_chain = []

    if not math.isclose(probe.video[0]['aspect'],
                        _pre_comp.aspect, abs_tol=0.03):
        if probe.video[0]['aspect'] < _pre_comp.aspect:
            filter_chain.append(
                'pad=ih*{}/{}/sar:ih:(ow-iw)/2:(oh-ih)/2'.format(_pre_comp.w,
                                                                 _pre_comp.h))
        elif probe.video[0]['aspect'] > _pre_comp.aspect:
            filter_chain.append(
                'pad=iw:iw*{}/{}/sar:(ow-iw)/2:(oh-ih)/2'.format(_pre_comp.h,
                                                                 _pre_comp.w))

    return filter_chain


def fps_filter(probe):
    """
    changing frame rate
    """
    filter_chain = []

    if probe.video[0]['fps'] != _pre_comp.fps:
        filter_chain.append('framerate=fps={}'.format(_pre_comp.fps))

    return filter_chain


def scale_filter(probe):
    """
    if the target resolution is different to the source, add a scale filter,
    also apply an aspect filter when the aspect is different
    """
    filter_chain = []

    if int(probe.video[0]['width']) != _pre_comp.w or \
            int(probe.video[0]['height']) != _pre_comp.h:
        filter_chain.append('scale={}:{}'.format(_pre_comp.w, _pre_comp.h))

    if not math.isclose(probe.video[0]['aspect'],
                        _pre_comp.aspect, abs_tol=0.03):
        filter_chain.append('setdar=dar={}'.format(_pre_comp.aspect))

    return filter_chain

def fade_filter(first, duration, seek, out, track=''):
    """
    fade in/out the video or audio, when it is cut at the begin or end
    """
    filter_chain = []

    if seek > 0.0 and not first:
        filter_chain.append('{}fade=in:st=0:d=0.5'.format(track))

    if out < duration:
        filter_chain.append('{}fade=out:st={}:d=1.0'.format(track,
                                                            out - seek - 1.0))

    return filter_chain


def overlay_filter(duration, ad, ad_last, ad_next):
    """
    overlay logo: when the clip is an ad don't overlay,
    when an ad is coming next fade the logo out,
    when the clip before was an ad fade the logo in
    """
    logo_filter = '[v]null[logo]'

    if os.path.isfile(_pre_comp.logo) and not ad:
        logo_chain = []
        opacity = 'format=rgba,colorchannelmixer=aa={}'.format(
            _pre_comp.opacity)
        loop = 'loop=loop={}:size=1:start=0'.format(
            duration * _pre_comp.fps)
        logo_chain.append('movie={},{},{}'.format(
            _pre_comp.logo, loop, opacity))
        if ad_last:
            logo_chain.append('fade=in:st=0:d=1.0:alpha=1')
        if ad_next:
            logo_chain.append('fade=out:st={}:d=1.0:alpha=1'.format(
                duration - 1))

        logo_filter = '{}[l];[v][l]{}[logo]'.format(
            ','.join(logo_chain), _pre_comp.logo_filter)

    return logo_filter

def add_audio(probe, duration):
    """
    when the clip has no audio we generate an audio line
    """
    line = []

    if not probe.audio:
        logger.warning('Clip has no audio!')
        line = [
            'aevalsrc=0:channel_layout=2:duration={}:sample_rate={}'.format(
                duration, 48000)]

    return line


def add_loudnorm(probe):
    """
    add single pass loudnorm filter to the audio line
    """
    loud_filter = []
    a_samples = int(192000 / _pre_comp.fps)

    if probe.audio and _pre_comp.add_loudnorm:
        loud_filter = [('loudnorm=I={}:TP={}:LRA={},'
                        'asetnsamples=n={}').format(_pre_comp.loud_i,
                                                    _pre_comp.loud_tp,
                                                    _pre_comp.loud_lra,
                                                    a_samples)]

    return loud_filter


def extend_audio(probe, duration):
    """
    check audio duration, if it is shorter than the clip duration - pad it
    """
    pad_filter = []

    if probe.audio and 'duration' in probe.audio[0] and \
            duration > float(probe.audio[0]['duration']) + 0.3:
        pad_filter.append('apad=whole_dur={}'.format(duration))

    return pad_filter


def extend_video(probe, duration, target_duration):
    """
    check video duration, if it is shorter than the clip duration - pad it
    """
    pad_filter = []

    if 'duration' in probe.video[0] and \
        target_duration < duration > float(
            probe.video[0]['duration']) + 0.3:
        pad_filter.append('tpad=stop_mode=add:stop_duration={}'.format(
            duration - float(probe.video[0]['duration'])))

    return pad_filter

def build_filtergraph(first, duration, seek, out, ad,
                      ad_last, ad_next, dummy, probe):
    """
    build final filter graph, with video and audio chain
    """
    video_chain = []
    audio_chain = []
    video_map = ['-map', '[logo]']

    if out > duration:
        seek = 0

    if not dummy:
        video_chain += deinterlace_filter(probe)
        video_chain += pad_filter(probe)
        video_chain += fps_filter(probe)
        video_chain += scale_filter(probe)
        video_chain += extend_video(probe, duration, out - seek)
        video_chain += fade_filter(first, duration, seek, out)

        audio_chain += add_audio(probe, out - seek)

        if not audio_chain:
            audio_chain.append('[0:a]anull')
            audio_chain += add_loudnorm(probe)
            audio_chain += extend_audio(probe, out - seek)
            audio_chain += fade_filter(first, duration, seek, out, 'a')

    if video_chain:
        video_filter = '{}[v]'.format(','.join(video_chain))
    else:
        video_filter = 'null[v]'

    logo_filter = overlay_filter(out - seek, ad, ad_last, ad_next)
    video_filter = [
        '-filter_complex', '[0:v]{};{}'.format(
            video_filter, logo_filter)]

    if audio_chain:
        audio_filter = [
            '-filter_complex', '{}[a]'.format(','.join(audio_chain))]
        audio_map = ['-map', '[a]']
    else:
        audio_filter = []
        audio_map = ['-map', '0:a']

    if dummy:
        return video_filter + video_map + ['-map', '1:a']
    else:
        return video_filter + audio_filter + video_map + audio_map

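# The list returned by build_filtergraph() is appended directly to the ffmpeg
# input arguments, e.g. (shortened, illustrative):
#   ['-filter_complex', '[0:v]scale=1024:576[v];[v]null[logo]',
#    '-filter_complex', '[0:a]anull[a]',
#    '-map', '[logo]', '-map', '[a]']
# For dummy clips the audio is taken from the second lavfi input instead and
# mapped with '-map', '1:a'.
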
# ------------------------------------------------------------------------------
# folder watcher
# ------------------------------------------------------------------------------

class MediaStore:
    """
    fill media list for playing,
    MediaWatcher will interact with add and remove
    """

    def __init__(self):
        self.store = []

        if stdin_args.folder:
            self.folder = stdin_args.folder
        else:
            self.folder = _storage.path

        self.fill()

    def fill(self):
        for ext in _storage.extensions:
            self.store.extend(
                glob.glob(os.path.join(self.folder, '**', ext),
                          recursive=True))

        self.sort()

    def add(self, file):
        self.store.append(file)
        self.sort()

    def remove(self, file):
        self.store.remove(file)
        self.sort()

    def sort(self):
        # sort list for sorted playing
        self.store = sorted(self.store)

class MediaWatcher:
    """
    watch given folder for file changes and update media list
    """

    def __init__(self, media):
        self._media = media

        self.event_handler = PatternMatchingEventHandler(
            patterns=_storage.extensions)
        self.event_handler.on_created = self.on_created
        self.event_handler.on_moved = self.on_moved
        self.event_handler.on_deleted = self.on_deleted

        self.observer = Observer()
        self.observer.schedule(self.event_handler, self._media.folder,
                               recursive=True)

        self.observer.start()

    def on_created(self, event):
        # add file to media list only if it is completely copied
        file_size = -1
        while file_size != os.path.getsize(event.src_path):
            file_size = os.path.getsize(event.src_path)
            time.sleep(1)

        self._media.add(event.src_path)

        logger.info('Add file to media list: "{}"'.format(event.src_path))

    def on_moved(self, event):
        self._media.remove(event.src_path)
        self._media.add(event.dest_path)

        logger.info('Move file from "{}" to "{}"'.format(event.src_path,
                                                         event.dest_path))

    def on_deleted(self, event):
        self._media.remove(event.src_path)

        logger.info('Remove file from media list: "{}"'.format(event.src_path))

    def stop(self):
        self.observer.stop()
        self.observer.join()

class GetSource:
    """
    give next clip, depending on shuffle mode
    """

    def __init__(self, media):
        self._media = media

        self.last_played = []
        self.index = 0
        self.probe = MediaProbe()

    def next(self):
        while True:
            if _storage.shuffle:
                clip = random.choice(self._media.store)

                if len(self.last_played) > len(self._media.store) / 2:
                    self.last_played.pop(0)

                if clip not in self.last_played:
                    self.last_played.append(clip)
                    self.probe.load(clip)
                    filtergraph = build_filtergraph(
                        False, float(self.probe.format['duration']), 0.0,
                        float(self.probe.format['duration']), False, False,
                        False, False, self.probe)

                    yield ['-i', clip] + filtergraph

            else:
                while self.index < len(self._media.store):
                    self.probe.load(self._media.store[self.index])
                    filtergraph = build_filtergraph(
                        False, float(self.probe.format['duration']), 0.0,
                        float(self.probe.format['duration']), False, False,
                        False, False, self.probe)

                    yield [
                        '-i', self._media.store[self.index]
                    ] + filtergraph
                    self.index += 1
                else:
                    self.index = 0

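# Folder mode wiring (see main() below): MediaStore scans the storage folder
# once, MediaWatcher keeps the list in sync through watchdog events, and
# GetSource.next() yields ready-to-use ffmpeg input arguments, either
# shuffled or in sorted order.
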
# ------------------------------------------------------------------------------
# main functions
# ------------------------------------------------------------------------------

class GetSourceIter(object):
    """
    read values from json playlist,
    get current clip in time,
    set ffmpeg source command
    """

    def __init__(self, encoder):
        self._encoder = encoder
        self.init_time = get_time('full_sec')
        self.last_time = self.init_time
        self.day_in_sec = 86400.0

        # when _playlist.start is set, use start time
        if _playlist.start:
            self.init_time = _playlist.start

            if 0 <= self.last_time < _playlist.start:
                self.last_time += self.day_in_sec

        self.last_mod_time = 0.0
        self.json_file = None
        self.clip_nodes = None
        self.src_cmd = None
        self.probe = MediaProbe()
        self.filtergraph = []
        self.first = True
        self.last = False
        self.list_date = get_date(True)
        self.is_dummy = False
        self.last_error = ''
        self.timestamp = get_time('stamp')

        self.src = None
        self.seek = 0
        self.out = 20
        self.duration = 20
        self.ad = False
        self.ad_last = False
        self.ad_next = False

    def get_playlist(self):
        if stdin_args.playlist:
            self.json_file = stdin_args.playlist
        else:
            year, month, day = self.list_date.split('-')
            self.json_file = os.path.join(
                _playlist.path, year, month, self.list_date + '.json')

        if '://' in self.json_file:
            self.json_file = self.json_file.replace('\\', '/')

            try:
                req = request.urlopen(self.json_file,
                                      timeout=1,
                                      context=ssl._create_unverified_context())
                b_time = req.headers['last-modified']
                temp_time = time.strptime(b_time, "%a, %d %b %Y %H:%M:%S %Z")
                mod_time = time.mktime(temp_time)

                if mod_time > self.last_mod_time:
                    self.clip_nodes = valid_json(req)
                    self.last_mod_time = mod_time
                    logger.info('open: ' + self.json_file)
                    validate_thread(self.clip_nodes)
            except (request.URLError, socket.timeout):
                self.eof_handling('Get playlist from url failed!', False)

        elif os.path.isfile(self.json_file):
            # check last modification from playlist
            mod_time = os.path.getmtime(self.json_file)
            if mod_time > self.last_mod_time:
                with open(self.json_file, 'r', encoding='utf-8') as f:
                    self.clip_nodes = valid_json(f)

                self.last_mod_time = mod_time
                logger.info('open: ' + self.json_file)
                validate_thread(self.clip_nodes)
        else:
            # when we have no playlist for the current day,
            # then we generate a black clip
            # and calculate the seek in time, for when the playlist comes back
            self.eof_handling('Playlist not exist:', False)

    def get_clip_in_out(self, node):
        if is_float(node["in"]):
            self.seek = node["in"]
        else:
            self.seek = 0

        if is_float(node["duration"]):
            self.duration = node["duration"]
        else:
            self.duration = 20

        if is_float(node["out"]):
            self.out = node["out"]
        else:
            self.out = self.duration

    def url_or_live_source(self):
        prefix = self.src.split('://')[0]

        # check if input is a live source
        if self.src and prefix in _pre_comp.protocols:
            cmd = [
                'ffprobe', '-v', 'error', '-show_entries', 'format=duration',
                '-of', 'default=noprint_wrappers=1:nokey=1', self.src]

            try:
                output = check_output(cmd).decode('utf-8')
            except CalledProcessError as err:
                logger.error("ffprobe error: {}".format(err))
                output = None

            if not output:
                mailer.error('Clip not exist:\n{}'.format(self.src))
                logger.error('Clip not exist: {}'.format(self.src))
                self.src = None
            elif is_float(output):
                self.duration = float(output)
            else:
                self.duration = self.day_in_sec
                self.out = self.out - self.seek
                self.seek = 0

    def get_input(self):
        self.src_cmd, self.time_left = gen_input(
            self.src, self.begin, self.duration,
            self.seek, self.out, self.last
        )

    def is_source_dummy(self):
        if self.src_cmd and 'lavfi' in self.src_cmd:
            self.is_dummy = True
        else:
            self.is_dummy = False

    def get_category(self, index, node):
        if 'category' in node:
            if index - 1 >= 0:
                last_category = self.clip_nodes[
                    "program"][index - 1]["category"]
            else:
                last_category = 'noad'

            if index + 2 <= len(self.clip_nodes["program"]):
                next_category = self.clip_nodes[
                    "program"][index + 1]["category"]
            else:
                next_category = 'noad'

            if node["category"] == 'advertisement':
                self.ad = True
            else:
                self.ad = False

            if last_category == 'advertisement':
                self.ad_last = True
            else:
                self.ad_last = False

            if next_category == 'advertisement':
                self.ad_next = True
            else:
                self.ad_next = False

    def set_filtergraph(self):
        self.filtergraph = build_filtergraph(
            self.first, self.duration, self.seek, self.out,
            self.ad, self.ad_last, self.ad_next, self.is_dummy, self.probe)

    def eof_handling(self, message, filler):
        self.seek = 0.0
        self.ad = False

        ref_time = self.day_in_sec
        time = get_time('full_sec')

        if _playlist.start:
            ref_time = self.day_in_sec + _playlist.start

            if 0 <= time < _playlist.start:
                time += self.day_in_sec

        time_diff = self.out - self.seek + time
        new_len = self.out - self.seek - (time_diff - ref_time)

        self.out = abs(new_len)
        self.duration = abs(new_len)
        self.list_date = get_date(False)
        self.last_mod_time = 0.0
        self.first = False
        self.last_time = 0.0

        if filler:
            self.src_cmd = gen_filler(self.duration)

            if _storage.filler:
                self.is_dummy = False
                self.duration += 1
                self.probe.load(_storage.filler)
            else:
                self.is_dummy = True
        else:
            self.src_cmd = gen_dummy(self.duration)
            self.is_dummy = True
        self.set_filtergraph()

        if get_time('stamp') - self.timestamp > 3600 \
                and message != self.last_error:
            self.last_error = message
            mailer.error('{}\n{}'.format(message, self.json_file))
            self.timestamp = get_time('stamp')

        logger.error('{} {}'.format(message, self.json_file))

        self.last = False

    def next(self):
        while True:
            self.get_playlist()

            if self.clip_nodes is None:
                self.is_dummy = True
                self.set_filtergraph()
                yield self.src_cmd + self.filtergraph
                continue

            self.begin = self.init_time

            # loop through all clips in playlist
            for index, node in enumerate(self.clip_nodes["program"]):
                self.get_clip_in_out(node)

                # first time we end up here
                if self.first and \
                        self.last_time < self.begin + self.out - self.seek:
                    if _playlist.start:
                        # calculate seek time
                        self.seek = self.last_time - self.begin + self.seek

                    self.src = node["source"]
                    self.probe.load(self.src)

                    self.url_or_live_source()
                    self.get_input()
                    self.is_source_dummy()
                    self.get_category(index, node)
                    self.set_filtergraph()

                    self.first = False
                    self.last_time = self.begin
                    break
                elif self.last_time < self.begin:
                    if index + 1 == len(self.clip_nodes["program"]):
                        self.last = True
                    else:
                        self.last = False

                    if _playlist.start:
                        check_sync(self.begin, self._encoder)

                    self.src = node["source"]
                    self.probe.load(self.src)

                    self.url_or_live_source()
                    self.get_input()
                    self.is_source_dummy()
                    self.get_category(index, node)
                    self.set_filtergraph()

                    if self.time_left is None:
                        # normal behavior
                        self.last_time = self.begin
                    elif self.time_left > 0.0:
                        # when playlist is finished and we have time left
                        self.list_date = get_date(False)
                        self.last_time = self.begin
                        self.out = self.time_left

                        self.eof_handling(
                            'Playlist is not long enough!', False)

                    else:
                        # when there is no time left and we are in time,
                        # set right values for new playlist
                        self.list_date = get_date(False)
                        self.last_time = _playlist.start - 5
                        self.last_mod_time = 0.0

                    break

                self.begin += self.out - self.seek
            else:
                if not _playlist.start or 'length' not in self.clip_nodes:
                    # when we reach the current end, stop script
                    logger.info('Playlist reach End!')
                    return

                elif self.begin == self.init_time:
                    # no clip was played, generate dummy
                    self.eof_handling('Playlist is empty!', False)
                else:
                    # playlist is not long enough, play filler
                    self.eof_handling('Playlist is not long enough!', True)

            if self.src_cmd is not None:
                yield self.src_cmd + self.filtergraph

def main():
    """
    pipe ffmpeg pre-process to final ffmpeg post-process,
    or play with ffplay
    """
    year = get_date(False).split('-')[0]
    overlay = []

    ff_pre_settings = [
        '-pix_fmt', 'yuv420p', '-r', str(_pre_comp.fps),
        '-c:v', 'mpeg2video', '-intra',
        '-b:v', '{}k'.format(_pre_comp.v_bitrate),
        '-minrate', '{}k'.format(_pre_comp.v_bitrate),
        '-maxrate', '{}k'.format(_pre_comp.v_bitrate),
        '-bufsize', '{}k'.format(_pre_comp.v_bufsize),
        '-c:a', 's302m', '-strict', '-2', '-ar', '48000', '-ac', '2',
        '-f', 'mpegts', '-']

    if os.path.isfile(_text.textfile):
        logger.info('Overlay text file: "{}"'.format(_text.textfile))
        overlay = [
            '-vf', ("drawtext=box={}:boxcolor='{}':boxborderw={}"
                    ":fontsize={}:fontcolor={}:fontfile='{}':textfile={}"
                    ":reload=1:x='{}':y='{}'").format(
                        _text.box, _text.boxcolor, _text.boxborderw,
                        _text.fontsize, _text.fontcolor, _text.fontfile,
                        _text.textfile, _text.x, _text.y)
        ]

    try:
        if _playout.preview or stdin_args.desktop:
            # preview playout to player
            encoder = Popen([
                'ffplay', '-hide_banner', '-nostats', '-i', 'pipe:0'
            ] + overlay, stderr=None, stdin=PIPE, stdout=None)
        else:
            encoder = Popen([
                'ffmpeg', '-v', 'info', '-hide_banner', '-nostats',
                '-re', '-thread_queue_size', '256',
                '-i', 'pipe:0'] + overlay + _playout.post_comp_video
                + _playout.post_comp_audio + [
                '-metadata', 'service_name=' + _playout.name,
                '-metadata', 'service_provider=' + _playout.provider,
                '-metadata', 'year={}'.format(year)
            ] + _playout.post_comp_extra + [_playout.out_addr], stdin=PIPE)

        if _playlist.mode and not stdin_args.folder:
            watcher = None
            get_source = GetSourceIter(encoder)
        else:
            logger.info("start folder mode")
            media = MediaStore()
            watcher = MediaWatcher(media)
            get_source = GetSource(media)

        try:
            for src_cmd in get_source.next():
                logger.debug('src_cmd: "{}"'.format(src_cmd))
                if src_cmd[0] == '-i':
                    current_file = src_cmd[1]
                else:
                    current_file = src_cmd[3]

                logger.info('play: "{}"'.format(current_file))

                with Popen([
                    'ffmpeg', '-v', 'error', '-hide_banner', '-nostats'
                ] + src_cmd + ff_pre_settings,
                        stdout=PIPE) as decoder:
                    copyfileobj(decoder.stdout, encoder.stdin)

        except BrokenPipeError:
            logger.error('Broken Pipe!')
            terminate_processes(decoder, encoder, watcher)

        except SystemExit:
            logger.info("got close command")
            terminate_processes(decoder, encoder, watcher)

        except KeyboardInterrupt:
            logger.warning('program terminated')
            terminate_processes(decoder, encoder, watcher)

        # close encoder when there is nothing to do anymore
        if encoder.poll() is None:
            encoder.terminate()

    finally:
        encoder.wait()


if __name__ == '__main__':
    if not _playlist.mode or stdin_args.folder:
        from watchdog.events import PatternMatchingEventHandler
        from watchdog.observers import Observer

    main()