Compare commits

..

1 Commits

Author SHA1 Message Date
jb-alvarado
519339f593
Merge pull request #118 from ffplayout/master
update
2022-04-14 15:03:00 +02:00
259 changed files with 4837 additions and 162920 deletions

View File

@ -1,2 +0,0 @@
[env]
TS_RS_EXPORT_DIR = { value = "frontend/types", relative = true }

2
.github/FUNDING.yml vendored
View File

@ -1,5 +1,3 @@
# These are supported funding model platforms # These are supported funding model platforms
github: [jb-alvarado] github: [jb-alvarado]
custom: PayPal.Me/jonaBaec
open_collective: ffplayout

View File

@ -7,40 +7,28 @@ assignees: ''
--- ---
<!-- **Describe the bug**
Note: use this template only when you have a bug to report!
-->
### Describe the bug
<!--
A clear and concise description of what the bug is. A clear and concise description of what the bug is.
-->
### To Reproduce
**To Reproduce**
Steps to reproduce the behavior: Steps to reproduce the behavior:
1. Go to '...' 1. Go to '...'
2. Click on '....' 2. Click on '....'
3. Scroll down to '....' 3. Scroll down to '....'
4. See error 4. See error
### Expected behavior **Expected behavior**
<!--
A clear and concise description of what you expected to happen. A clear and concise description of what you expected to happen.
-->
### Desktop/Server/Software (please complete the following information): **Desktop/Server/Software (please complete the following information):**
- OS: [e.g. debian 10]
- OS: [e.g. debian 12] - python version
- ffplayout version
- ffmpeg version - ffmpeg version
- are you using the current master of ffplayout? - are you using the current master of ffplayout?
### Config Settings: **Config Settings:**
- command line arguments - command line arguments
- config fie - config fie
### Logging: **Logging:**
- content of: ffplayout.log, decoding.log and encoding.log
- content of: ffplayout.log

View File

@ -1,26 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: '[Enhancement] <!--FEATURE NAME-->'
labels: enhancement
---
<!--
Note: use this template only when you have a feature request!
-->
### Feature description
<!--
A clear and concise description of what the feature should do.
-->
### The problem in the current version
<!--
What exactly is currently missing?
-->
### Alternative ways
<!--
What have you already tried to solve this problem?
-->

View File

@ -1,12 +0,0 @@
name: Autocloser
on: [issues]
jobs:
autoclose:
runs-on: ubuntu-latest
steps:
- name: Autoclose issues that did not follow issue template
uses: roots/issue-closer@v1.2
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
issue-close-message: "@${issue.user.login} this issue was automatically closed because it did not follow the issue template. Please read [CONTRIBUTING.md](https://github.com/ffplayout/ffplayout/blob/master/CONTRIBUTING.md) for more informations."
issue-pattern: ".*### Describe the bug([\\s\\S]*?)### To Reproduce.*|### Feature description.*"

67
.github/workflows/codeql-analysis.yml vendored Normal file
View File

@ -0,0 +1,67 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ master ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ master ]
schedule:
- cron: '38 5 * * 3'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
language: [ 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more:
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v1
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1

32
.github/workflows/pythonapp.yml vendored Normal file
View File

@ -0,0 +1,32 @@
name: Python application
on: [push]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: Set up Python 3.9
uses: actions/setup-python@v1
with:
python-version: 3.9
- name: Install dependencies
run: |
sudo apt update
sudo apt install ffmpeg
python -m pip install --upgrade pip
pip install -r requirements-base.txt
pip install -r requirements-dev.txt
- name: Lint with flake8
run: |
pip install flake8
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Test with pytest
run: |
pytest -vv

View File

@ -1,36 +0,0 @@
name: rust
on: [push, pull_request]
jobs:
build:
strategy:
matrix:
os: [ubuntu-latest, macOS-latest, windows-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
- name: Set Build Tools and update Rust
run: |
rustup update stable
rustup component add rustfmt
rustup component add clippy
- name: Init Submodules
run: |
git submodule update --init --recursive
- name: Use ffmpeg on Linux
if: ${{ matrix.os == 'ubuntu-latest' }}
uses: FedericoCarboni/setup-ffmpeg@v2
- name: Tests on Linux
if: ${{ matrix.os == 'ubuntu-latest' }}
run: |
cargo test --all-features
cargo clippy --all-features --all-targets -- --deny warnings
cargo fmt --all -- --check
- name: Run build on ${{ matrix.os }}
run: cargo build --all-features

72
.gitignore vendored
View File

@ -1,59 +1,13 @@
# Generated by Cargo .ropeproject
# will have compiled files and executables **temp
/target/ **playlists
*.log*
# These are backup files generated by rustfmt .DS_Store
**/*.rs.bk __pycache__/
*-orig.*
# exclude binarys in examples folder *.json
/examples/* test/
!/examples/*.rs .pytest_cache/
*.db* venv/
log/
# exlcude logging .mypy_cache/
*.log
/logs/
*.exe
*.zip
*tar.gz
*.deb
*.rpm
ffplayout.1.gz
/assets/*.db*
/dist/
data/
/public/
tmp/
assets/playlist_template.json
advanced*.toml
ffplayout*.toml
template.json
# frontend stuff
node_modules
.nuxt
.nitro
.cache
.output
.env
dist
.eslintcache
*.tgz
.yarn-integrity
sw.*
.DS_Store
*.swp
master.m3u8
tv-media
tv-media/
Videos
Videos/
*.tar*
home
home/
live1
live1/
Musik
Musik/
test.vue

View File

@ -1,8 +0,0 @@
assets/
debian/
docker/
docs/
frontend/
migrations/
scripts/
tests/

606
.pylintrc Normal file
View File

@ -0,0 +1,606 @@
[MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-whitelist=
# Specify a score threshold to be exceeded before program exits with error.
fail-under=10.0
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use.
jobs=1
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=print-statement,
parameter-unpacking,
unpacking-in-except,
old-raise-syntax,
backtick,
long-suffix,
old-ne-operator,
old-octal-literal,
import-star-module-level,
non-ascii-bytes-literal,
raw-checker-failed,
bad-inline-option,
locally-disabled,
file-ignored,
suppressed-message,
useless-suppression,
deprecated-pragma,
use-symbolic-message-instead,
apply-builtin,
basestring-builtin,
buffer-builtin,
cmp-builtin,
coerce-builtin,
execfile-builtin,
file-builtin,
long-builtin,
raw_input-builtin,
reduce-builtin,
standarderror-builtin,
unicode-builtin,
xrange-builtin,
coerce-method,
delslice-method,
getslice-method,
setslice-method,
no-absolute-import,
old-division,
dict-iter-method,
dict-view-method,
next-method-called,
metaclass-assignment,
indexing-exception,
raising-string,
reload-builtin,
oct-method,
hex-method,
nonzero-method,
cmp-method,
input-builtin,
round-builtin,
intern-builtin,
unichr-builtin,
map-builtin-not-iterating,
zip-builtin-not-iterating,
range-builtin-not-iterating,
filter-builtin-not-iterating,
using-cmp-argument,
eq-without-hash,
div-method,
idiv-method,
rdiv-method,
exception-message-attribute,
invalid-str-codec,
sys-max-int,
bad-python3-import,
deprecated-string-function,
deprecated-str-translate-call,
deprecated-itertools-function,
deprecated-types-field,
next-method-defined,
dict-items-not-iterating,
dict-keys-not-iterating,
dict-values-not-iterating,
deprecated-operator-function,
deprecated-urllib-function,
xreadlines-attribute,
deprecated-sys-function,
exception-escape,
comprehension-escape,
too-few-public-methods,
logging-fstring-interpolation,
too-many-instance-attributes,
missing-function-docstring,
import-error,
consider-using-with
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[REPORTS]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'error', 'warning', 'refactor', and 'convention'
# which contain the number of messages in each category, as well as 'statement'
# which is the total number of statements analyzed. This score is used by the
# global evaluation report (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=yes
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit
[LOGGING]
# The type of string formatting that logging methods do. `old` means using %
# formatting, `new` is for `{}` formatting.
logging-format-style=new
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style.
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Bad variable names regexes, separated by a comma. If names match any regex,
# they will always be refused
bad-names-rgxs=
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style.
#class-attribute-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
j,
k,
ex,
Run,
_
# Good variable names regexes, separated by a comma. If names match any regex,
# they will always be accepted
good-names-rgxs=
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style.
#variable-rgx=
[SIMILARITIES]
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
# Minimum lines number of a similarity.
min-similarity-lines=4
[STRING]
# This flag controls whether inconsistent-quotes generates a warning when the
# character used as a quote delimiter is used inconsistently within a module.
check-quote-consistency=no
# This flag controls whether the implicit-str-concat should generate a warning
# on implicit string concatenation in sequences defined over several lines.
check-str-concat-over-line-jumps=no
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
_cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored. Default to name
# with leading underscore.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=100
# Maximum number of lines in a module.
max-module-lines=1000
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
XXX,
TODO
# Regular expression of note tags to take in consideration.
#notes-rgx=
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=LINGER,REQ,ROUTER,NOBLOCK
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
# List of decorators that change the signature of a decorated function.
signature-mutators=
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. Available dictionaries: de_AT (hunspell), de_BE
# (hunspell), de_CH (hunspell), de_DE (hunspell), de_LI (hunspell), de_LU
# (hunspell), en_AG (hunspell), en_AU (hunspell), en_BS (hunspell), en_BW
# (hunspell), en_BZ (hunspell), en_CA (hunspell), en_DK (hunspell), en_GB
# (hunspell), en_GH (hunspell), en_HK (hunspell), en_IE (hunspell), en_IN
# (hunspell), en_JM (hunspell), en_MW (hunspell), en_NA (hunspell), en_NG
# (hunspell), en_NZ (hunspell), en_PH (hunspell), en_SG (hunspell), en_TT
# (hunspell), en_US (hunspell), en_ZA (hunspell), en_ZM (hunspell), en_ZW
# (hunspell).
spelling-dict=en_US
# List of comma separated words that should not be checked.
spelling-ignore-words=rtmp,srs,supervisord,xmlrpclib,systemd,zmq,ffmpeg,CSS,JWT,
auth,Django,django,ffplayout,startproject,playlist,playlists,
http,www,init,json,cmd,config,configs,loudnorm,stderr,stdout,
ctrl,ffprobe,yaml,HH,MM,SS,libs,mediainfo,formatter,hls,HLS,
realtime,cutted,pillarbox,deinterlacing,drawtext,pre,rtp,svt,
ffplay,codecs
# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no
[DESIGN]
# Maximum number of arguments for function / method.
max-args=5
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5
# Maximum number of branch for function / method body.
max-branches=15
# Maximum number of locals for function / method body.
max-locals=25
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body.
max-returns=6
# Maximum number of statements in function / method body.
max-statements=60
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
__new__,
setUp,
__post_init__
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=cls
[IMPORTS]
# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=optparse,tkinter.tix
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled).
ext-import-graph=
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled).
import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "BaseException, Exception".
overgeneral-exceptions=BaseException,
Exception

View File

@ -1,13 +0,0 @@
{
"recommendations": [
"bradlc.vscode-tailwindcss",
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"hollowtree.vue-snippets",
"rust-lang.rust-analyzer",
"statiolake.vscode-rustfmt",
"tamasfe.even-better-toml",
"vue.volar",
"wscats.vue",
]
}

96
.vscode/settings.json vendored
View File

@ -1,96 +0,0 @@
{
"eslint.useFlatConfig": true,
"prettier.tabWidth": 4,
"prettier.printWidth": 120,
"vue3snippets.semi": false,
"vue3snippets.singleQuote": true,
"vue3snippets.jsxSingleQuote": true,
"vue3snippets.printWidth": 120,
"vue3snippets.tabWidth": 4,
"prettier.jsxSingleQuote": true,
"prettier.semi": false,
"prettier.singleQuote": true,
"rust-analyzer.cargo.target": null,
"rust-analyzer.checkOnSave": true,
"rust-analyzer.cargo.buildScripts.overrideCommand": null,
"rust-analyzer.rustfmt.overrideCommand": null,
"rust-analyzer.inlayHints.chainingHints.enable": false,
"rust-analyzer.inlayHints.parameterHints.enable": false,
"rust-analyzer.inlayHints.typeHints.enable": false,
"rust-analyzer.diagnostics.disabled": ["unresolved-proc-macro"],
"[dockercompose]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[css]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[html]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[scss]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[vue]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[rust]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "statiolake.vscode-rustfmt"
},
"[yaml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"cSpell.words": [
"actix",
"aevalsrc",
"afade",
"apad",
"boxborderw",
"boxcolor",
"canonicalize",
"cgop",
"coeffs",
"ffpengine",
"flexi",
"fontcolor",
"fontfile",
"fontsize",
"httpauth",
"ifnot",
"keyint",
"lettre",
"libc",
"libx",
"libzmq",
"maxrate",
"minrate",
"muxdelay",
"muxer",
"muxpreload",
"n'vtt",
"neli",
"nuxt",
"paris",
"Referer",
"reqwest",
"rsplit",
"RTSP",
"rustls",
"scenecut",
"sqlite",
"sqlx",
"starttls",
"tokio",
"tpad",
"unistd",
"uuids",
"webm",
"zerolatency"
]
}

View File

@ -1,406 +0,0 @@
# Changelog
## [0.20.3](https://github.com/ffplayout/ffplayout/releases/tag/v0.20.3) (2024-01-03)
### ffplayout
- improve live sources [9912405](https://github.com/ffplayout/ffplayout/commit/9912405e4e976b99be9d174fa9cc54700984d5a9)
- update sysinfo to support stats on network storage [8737769](https://github.com/ffplayout/ffplayout/commit/873776908e10b2eb9d92fb743a578a848e95c49c)
### Documentation
- fix API examples [c8ca4588d](https://github.com/ffplayout/ffplayout/commit/c8ca4588d178b1f94f5c7dce40fd4a07a10a695b)
## [0.20.2](https://github.com/ffplayout/ffplayout/releases/tag/v0.20.2) (2023-12-16)
### ffplayout
- better error message [5c14b89](https://github.com/ffplayout/ffplayout/commit/5c14b895f2c8e34990097354fea860a5030a5732)
- warn and adjust duration on validation [a30f21b](https://github.com/ffplayout/ffplayout/commit/a30f21b86688fbf4de477279217ca3a739409719)
### ffpapi
- thread block on hashing [4c4199cb](https://github.com/ffplayout/ffplayout/commit/4c4199cbdb0836d69d67fd6dee1869fb08eeffbf)
- remove salt from table [15f41148](https://github.com/ffplayout/ffplayout/commit/15f41148dfb26ccaea159f5c5305a966cf81b1c4)
### frontend
- possibility to preview live/html sources [5881527](https://github.com/ffplayout/ffplayout/pull/472/commits/5881527fc571feccaee7f7f1877750ccc44516f5)
## [0.20.1](https://github.com/ffplayout/ffplayout/releases/tag/v0.20.1) (2023-12-03)
### ffplayout
- add silence detection for validation [ea83160](https://github.com/ffplayout/ffplayout/commit/ea83160ba63bb8723de1f004f6449b37a1ea2593)
- loop separate audio when it is too short [94e02ac](https://github.com/ffplayout/ffplayout/commit/94e02ac3678c0f8cdec97002f30e08beb45e748b)
- add probe object in validation thread, to hopefully reduce latency and reduce unneeded file access [0330ad6](https://github.com/ffplayout/ffplayout/commit/0330ad61681a4cb576d4a46365c8cdffdfc96379)
### ffpapi
- update actix-web-grants to v4 [f1e87975](https://github.com/ffplayout/ffplayout/commit/f1e8797528e649aac6de85d897b7c03b8007a2b3)
### frontend
- call system status only when app is not hidden [3f22297](https://github.com/ffplayout/ffplayout/commit/3f222975c16580deeeedaa2e0721e4a312e7c8fb)
- select, edit and delete user [f86a6c3](https://github.com/ffplayout/ffplayout/commit/f86a6c3f1dfb8ec5f3c8e74714b8eecda2b443c3)
- global middleware [c60d60d](https://github.com/ffplayout/ffplayout/commit/c60d60d9b3f74095034760f22876aed877e0464f)
## [0.20.0](https://github.com/ffplayout/ffplayout/releases/tag/v0.20.0) (2023-11-16)
### ffplayout
- run task on clip change, #276 [5bd1b2](https://github.com/ffplayout/ffplayout/commit/5bd1b23513d3cb0a9f6574626032acdd6627e790)
- support filler folder [98d1d5](https://github.com/ffplayout/ffplayout/commit/98d1d5d606b3f90ebeb1f0cd54156ee820272dd2) [04353a](https://github.com/ffplayout/ffplayout/commit/04353a984d43e1059ee9808ee08700e8c5e1cb8b)
- support log level as cmd argument [334f84](https://github.com/ffplayout/ffplayout/commit/334f842d1923e7150f0ed504fa85f4936c0213d7)
- add stream copy mode, fix #324 [b44efd](https://github.com/ffplayout/ffplayout/commit/b44efde8f1a771122c10f79e1a5da8ba724acd56)
- replace realtime filter with readrate parameter for hls mode [4b18d41](https://github.com/ffplayout/ffplayout/commit/4b18d414b7437f48a3663e9e9b547e83ab605cda) (**!WARNING:** older ffmpeg versions will not work anymore! Now 5.0+ is needed.)
- choice audio track index, fix #348 [1bfff2](https://github.com/ffplayout/ffplayout/commit/1bfff27b4b46405b52a428b38bd00fe4e9c3f78d)
- fix boxborderw value [fef7d0](https://github.com/ffplayout/ffplayout/commit/fef7d04e65b6275b6bb6c5b813c83b8641051882)
- stop decoder with SIGTERM signal, instead of kill on non windows systems [d2c72d](https://github.com/ffplayout/ffplayout/commit/d2c72d56fe0cc1cced14f8d1d1746f5224011499)
- generate playlists based on template [0c51f8](https://github.com/ffplayout/ffplayout/commit/0c51f8303cd3eacdec8a0ac3abe9edd69e2271c2)
- update chrono and fix audit warning [83cff6](https://github.com/ffplayout/ffplayout/commit/83cff609b3709f4621af506de2f8546099b8848c)
- jump out from source loop when playout is terminated [cf6e56](https://github.com/ffplayout/ffplayout/commit/cf6e5663e98eb52bc84c0e9e5856943ddefc24d9)
- fix program hang when mail sending not work [38e73a](https://github.com/ffplayout/ffplayout/commit/38e73a0138430fc600ae809356127941e1f08eb2)
### ffpapi
- embed static files from frontend in ffpapi, add db path argument [b4cde6e](https://github.com/ffplayout/ffplayout/commit/b4cde6e12ce70af20f52f308d7cb4288f97d31fe)
- Use enum for Role everywhere [7d31735](https://github.com/ffplayout/ffplayout/commit/7d3173533fd8b2a9d6e718ada0c81f017aedc777)
- get config also as normal user [7d31735](https://github.com/ffplayout/ffplayout/commit/7d3173533fd8b2a9d6e718ada0c81f017aedc777)
- fix time shift [7d31735](https://github.com/ffplayout/ffplayout/commit/7d3173533fd8b2a9d6e718ada0c81f017aedc777)
- add option for public path [c304386](https://github.com/ffplayout/ffplayout/commit/c30438697d33fe360e92146c03ad8ce212e138a6)
- add system stat route [c304386](https://github.com/ffplayout/ffplayout/commit/c30438697d33fe360e92146c03ad8ce212e138a6)
### frontend
- option to add user [debb75](https://github.com/ffplayout/ffplayout/commit/debb751428239f2d0ac446a0b9a805cd1ec4a965)
- fix audit alert, get status from playout stat [50bee9](https://github.com/ffplayout/ffplayout-frontend/commit/50bee93c8555b14181864a654239f7e68c50cafb)
- restart modal for config save [2f3234](https://github.com/ffplayout/ffplayout-frontend/commit/2f3234221a0aef8e70d9e2b5e9bbfb1fe51921fc)
- add advanced playlist generator, update packages [806d53](https://github.com/ffplayout/ffplayout-frontend/commit/806d533bc2a84fc994897371071c4399172fa639)
- add dashboard [ba0c0fa](https://github.com/ffplayout/ffplayout/pull/446/commits/ba0c0faaac9c44fbf4f87752c89aaa8859be9bf1)
## [0.19.1](https://github.com/ffplayout/ffplayout/releases/tag/v0.19.1) (2023-10-08)
### ffplayout
- remove openssl dependencies [813e48f](https://github.com/ffplayout/ffplayout/commit/813e48fd54a6482eb09ec418e507733d689663d9)
- update packages [0808fb](https://github.com/ffplayout/ffplayout/commit/0808fb29ab8db17cf1d251336cc90c1db7aa92e0)
### frontend
- fix preview in player #397 [943cf9](https://github.com/ffplayout/ffplayout/commit/943cf90e15edc0efdb9abf0703cc6addbd3dfecc)
## [0.19.0](https://github.com/ffplayout/ffplayout/releases/tag/v0.19.0) (2023-07-19)
### ffplayout
- cleanup and update docker files, migrate to notify 6.0 [5502c45](https://github.com/ffplayout/ffplayout/commit/5502c45420a12b63c05493b2c69d4b6cdd0b044e)
- switch jsonrpc-http-server to tiny_http, update clap to next major version [8eb5c2b](https://github.com/ffplayout/ffplayout/commit/8eb5c2ba0280eeed25231e3379c88a9bfb47334c) [2b4fbff](https://github.com/ffplayout/ffplayout/commit/2b4fbff2dcbb23714b2fd851931df9c0fa15221c)
- The jsonrpc-http-server doesn't get any updates anymore and some of its libs are already unmaintained. Migrating to the new jsonrpsee doesn't make much sense, because its features are not needed. For our needs tiny_http is absolutely sufficient.
- set chrono features, cleanup, less logging [4a578e8](https://github.com/ffplayout/ffplayout/commit/4a578e83ffd4a8897521c45c5d1804eb961fec72)
- deserialize numbers to string for drawtext filter [c02241f](https://github.com/ffplayout/ffplayout/commit/c02241ffe8126e761ba9440c41e2d2f181ca40ea)
- add doc strings to rpc server [25e2ed7](https://github.com/ffplayout/ffplayout/commit/25e2ed739091f4de444110cdaf6f639b14397e86) [7c398c5](https://github.com/ffplayout/ffplayout/commit/7c398c5e556ca00140080bbe9fd4f424fe8d867a)
- run service inside docker as root, fix #329 [c4d5aec](https://github.com/ffplayout/ffplayout/commit/c4d5aec63e81db7706e21e7b4f7198073008538e)
- add duration from remote source, #336 [a15c8a0](https://github.com/ffplayout/ffplayout/commit/a15c8a01ba05749036048bda26ccb3918e1ce7af)
- don't log missing source when playlist is too short; add validate playlist option [83432e](https://github.com/ffplayout/ffplayout/commit/83432ef6735c5058a2251f76e0cee51d323ec774)
- debug log config path [40fd1c4](https://github.com/ffplayout/ffplayout/commit/40fd1c4751f46ae3630965095329a7832548d304)
- check if json rpc port is in use [ac90dcb](https://github.com/ffplayout/ffplayout/commit/ac90dcb157784a3b98990140f5535622a6689e65)
- fix ffmpeg zombies in HLS mode [972567a](https://github.com/ffplayout/ffplayout/commit/972567afa6e0b868e5114c40b00c7a620014d09a)
### ffpapi
- update sqlx to 0.7 [cd4c872](https://github.com/ffplayout/ffplayout/commit/cd4c8727bd0e908eb3f23e73b35f56ccda5938d1)
- rename hls output, fix #351 [acfe223](https://github.com/ffplayout/ffplayout/commit/acfe223301fd3d70cc358159dff122fc149bc32e)
### frontend
- fix empty remote names [968de86](https://github.com/ffplayout/ffplayout/commit/968de862f4d4f3348125fc5bd1be60f0cbcb6627)
- fix type errors [eca9507](https://github.com/ffplayout/ffplayout/commit/eca9507a1fc9c40c4de9c368ec72fd4a90e82c12)
- fix http-flv player, #349 [bf993a1](https://github.com/ffplayout/ffplayout/commit/bf993a13329204f74d50fd405afbe900859b4a95)
- watch channel change on player page, #351 [50204ce](https://github.com/ffplayout/ffplayout/commit/50204ce3815d52214a426ed3e93178117ad3be2c)
- update nuxtjs to 3.6.3 [5dd450e](https://github.com/ffplayout/ffplayout/pull/358/commits/5dd450e90c2151ebc37447c8659251c344da75be)
### Development
- init or update submodules [cd8a039](https://github.com/ffplayout/ffplayout/commit/cd8a039a6d7873eea6456564dff9ea3244005457)
### Documentation
- format text from Readme [26a7ac0](https://github.com/ffplayout/ffplayout/commit/26a7ac02b06cf2094f39c4ed5ce7990f83d69c28)
- simplify preview streaming example [6ca710d](https://github.com/ffplayout/ffplayout/commit/6ca710ded68e107acdb47f029ba4d0f33460ac2b)
## [0.18.4](https://github.com/ffplayout/ffplayout/releases/tag/v0.18.4) (2023-06-25)
### ffplayout
- fix player control in HLS Mode [ec33cdb](https://github.com/ffplayout/ffplayout/commit/ec33cdb30944ab19c028a085fcb6d974ec4e81be)
## [0.18.3](https://github.com/ffplayout/ffplayout/releases/tag/v0.18.3) (2023-06-16)
### ffpapi
- remove extra content type from header, fix [#331](https://github.com/ffplayout/ffplayout/issues/331)
## [0.18.2](https://github.com/ffplayout/ffplayout/releases/tag/v0.18.2) (2023-06-13)
### ffplayout
- update version, create dir with ignore error [2da9d1a](https://github.com/ffplayout/ffplayout/pull/327/commits/2da9d1a85d7ca3695022a74d79cf362a10e19705)
- add postrm, fix #326 [97455d5](https://github.com/ffplayout/ffplayout/pull/328/commits/97455d535c6214b04eca14812029ced23c7524e1)
## [0.18.1](https://github.com/ffplayout/ffplayout/releases/tag/v0.18.1) (2023-06-11)
### frontend
- update bootstrap to stable version [7f10e90](https://github.com/ffplayout/ffplayout/pull/325/commits/7f10e9013aabd44cb5d01193db4b10b0884c0cb3)
- fix config save [abf3d89](https://github.com/ffplayout/ffplayout/pull/325/commits/abf3d897a1df1fa35f06362f2e26d4ae1217bda4)
- hide chunk size waring [63d2849](https://github.com/ffplayout/ffplayout/pull/325/commits/63d28494d5ba263cf7e26d2022f990190cb8f6c2)
### ffplayout
- update packages [7f10e90](https://github.com/ffplayout/ffplayout/pull/325/commits/7f10e9013aabd44cb5d01193db4b10b0884c0cb3) [8dd8865](https://github.com/ffplayout/ffplayout/pull/325/commits/8dd886547bad342b8c000a63c025419583d8003f)
- remove redundant clone [d6baccf](https://github.com/ffplayout/ffplayout/pull/325/commits/d6baccf3a7ff645a8ca2938d782f2eee0ec08eb3)
## [0.18.0](https://github.com/ffplayout/ffplayout/releases/tag/v0.18.0) (2023-05-28)
### frontend
- mark and scroll to current clip, show when ingest is running [676d71e](https://github.com/ffplayout/ffplayout/commit/676d71e9b7ca37b1b40f1007f242023d49eed63b)
- split extensions to array, fix #318 [5871d09](https://github.com/ffplayout/ffplayout/commit/5871d092af020c278f267a57ef49c592f39ecd79)
### ffplayout
- remove loudnorm filter [535511f](https://github.com/ffplayout/ffplayout/commit/535511f394a98441be15fc62090340e94b2f5018)
- quality is too bad
- no regex match validation for scale filter [d1ce475](https://github.com/ffplayout/ffplayout/commit/d1ce4756924e4cfc969db91adaadfcd88c195dd0)
- try to create log path, if not exists. expose state file in config (important for multi channels) [6cd092c](https://github.com/ffplayout/ffplayout/commit/6cd092c30fd7c22428c0c0792987ef419a781ff5)
### ffpapi
- update most importend config values on new channel [6338207](https://github.com/ffplayout/ffplayout/commit/6338207fba9f217f144cb75afc764c16e5e3223e)
## [0.17.1](https://github.com/ffplayout/ffplayout/releases/tag/v0.17.1) (2023-04-07)
### frontend
- fix upload function [5e976f2](https://github.com/ffplayout/ffplayout/pull/310/commits/5e976f212b47d572839e01ee73dfb632fbe1a70c)
- update bootstrap to 5.3.0 alpha 3 [8024a99](https://github.com/ffplayout/ffplayout/pull/310/commits/8024a990a651920ba2244f6b120ecff9701c79d2)
## [0.17.0](https://github.com/ffplayout/ffplayout/compare/v0.16.7...v0.17.0) (2023-03-28)
### ffpapi
- use extensions from config and extra_extension from frontend [e363077](https://github.com/ffplayout/ffplayout/commit/e363077d30c47bb42adf39728f0f961cf1cee903)
- support folder list for playlist generation [e752a7a](https://github.com/ffplayout/ffplayout/commit/e752a7a95110b35d29538b8b2221e3f79c065b31)
- add piggyback mode [7e5a391](https://github.com/ffplayout/ffplayout/commit/7e5a391e3d77f67b243026d3c4c1fded583cd2d9) [6c5264e](https://github.com/ffplayout/ffplayout/commit/6c5264ea5fe123b0718a4525605761ee1971ffae)
### ffplayout
- fix v_in in custom filter [537f664](https://github.com/ffplayout/ffplayout/commit/537f664c067a122e31c06c196d354dc4bfd7fed3)
- add audio only mode [537f664](https://github.com/ffplayout/ffplayout/commit/537f664c067a122e31c06c196d354dc4bfd7fed3)
- get correct error level from config [c0740fc](https://github.com/ffplayout/ffplayout/commit/c0740fc8303f08cb57fe956de805290a705e5a28)
- fix logo path on windows system #291 [3328aaa](https://github.com/ffplayout/ffplayout/commit/3328aaac6a6b814ce491ff3c07e580136ea453dd)
### frontend
- rewrite frontend to nuxtjs 3
### Development
- update ffmpeg action [b2093dd](https://github.com/ffplayout/ffplayout/commit/b2093ddf352964115c11bc09c2849c8491ee1156)
- set version and other metadata globally [3b61d09](https://github.com/ffplayout/ffplayout/commit/3b61d09809db4c6e1c02c5b8f0bb22eab9f4568d)
- another filter test [0ed6add](https://github.com/ffplayout/ffplayout/commit/0ed6add25fe43b6137400fbbda68b641c766f734)
- fix "error: unpacking of archive failed: cpio: Bad magic" [8a2e1e7](https://github.com/ffplayout/ffplayout/commit/8a2e1e7d3dccf76a078a16e3845bd0b4398d2f3f)
### Documentation
- add infos about ingest errors [c57d497](https://github.com/ffplayout/ffplayout/commit/c57d497dee9afb47a9a388d4c72feeafd55f8867)
- update install instruction [d9952c8](https://github.com/ffplayout/ffplayout/commit/d9952c88fc5a50fd797161745ad0e95ec79099ef)
- add docker documentation [505ae23](https://github.com/ffplayout/ffplayout/commit/505ae23a1c2d69837ec1075be33d10f91eb6363f)
## [0.16.7](https://github.com/ffplayout/ffplayout/compare/v0.16.6...v0.16.7) (2022-12-20)
### ffplayout
- log error only when fdk_aac is in use [8ac3688](https://github.com/ffplayout/ffplayout/pull/249/commits/8ac3688d2bd178db9b5a54efa1bde4e688432564)
- make libx264 optional [1491f46](https://github.com/ffplayout/ffplayout/pull/249/commits/1491f46e3dbb9a4dfa14fe2ab3680c6d0cc89b3d)
- catch empty program list in [#101](https://github.com/ffplayout/ffplayout-frontend/issues/101) [850a48e](https://github.com/ffplayout/ffplayout/pull/249/commits/850a48ed43a671e7a0d924510b80592e489fff94)
- update packages, set correct port [a3ce014](https://github.com/ffplayout/ffplayout/pull/249/commits/a3ce014444672704d9c33af6f6105f57c40a544d)
### frontend
- remove dotenv, update packages [3ddec8c](https://github.com/ffplayout/ffplayout/pull/249/commits/3ddec8cf19db692412c603718038cf3f0ffa7815), should fix [#101](https://github.com/ffplayout/ffplayout-frontend/issues/101)
- suppress 408 error [b812de9](https://github.com/ffplayout/ffplayout/pull/249/commits/b812de97470fd21e8734fe6e5282cc0e871384ca)
## [0.16.6](https://github.com/ffplayout/ffplayout/compare/v0.16.5...v0.16.6) (2022-12-17)
### ffplayout
- add logo scale
- add optional ingest_level
- set windows title in desktop mode [f388820](https://github.com/ffplayout/ffplayout/commit/f38882032f809f094cef895beff07582f0fe9b8f)
### Development
- migrate to Rust 1.66.0 [f388820](https://github.com/ffplayout/ffplayout/commit/f38882032f809f094cef895beff07582f0fe9b8f)
- update packages [f388820](https://github.com/ffplayout/ffplayout/commit/f38882032f809f094cef895beff07582f0fe9b8f)
## [0.16.5](https://github.com/ffplayout/ffplayout/compare/v0.16.4...v0.16.5) (2022-11-28)
### ffpapi
- init db needs its own connection, fix #241 [edfff82](https://github.com/ffplayout/ffplayout/commit/edfff8269b660ef149023d859451b94c198474ba)
### ffplayout
- change StartLimitIntervalSec in systemd service [010fc29](https://github.com/ffplayout/ffplayout/commit/010fc29b38129e503c12b80a6710dccd90056851)
- get list of filters and libs for future usage (#201 #219) [52856d3](https://github.com/ffplayout/ffplayout/commit/52856d3f0945cae310b7bbae39ae0a5626b4822f)
## [0.16.4](https://github.com/ffplayout/ffplayout/compare/v0.16.3...v0.16.4) (2022-11-21)
### ffpapi
- add endpoint for getting program infos, mainly useful for generating xmltv [f576ded](https://github.com/ffplayout/ffplayout/commit/f576dedcb9ebac259ec2283a622cd521a2f614b8); [aa820b2](https://github.com/ffplayout/ffplayout/commit/aa820b29c2be0b2b6c946466ca6a274e8771ce4d); [0d87bae](https://github.com/ffplayout/ffplayout/commit/0d87baece7df9dfb40969a5b34c5cf944967aba0)
- use only one DB pool and share them with web::Data [a5f0813](https://github.com/ffplayout/ffplayout/commit/a5f0813d2acd1a41b132d114fa4dfa1ea6150c45); [4122aaa](https://github.com/ffplayout/ffplayout/commit/4122aaa7a6b52038dc61f643deeb30fdeee7e09e); [5780de3](https://github.com/ffplayout/ffplayout/commit/5780de38c40429402ead35d2815aa3d99feaa3be)
### ffplayout
- limit restart count from systemd service [694c9f8](https://github.com/ffplayout/ffplayout/commit/694c9f8c4b75f5e1d3c219dfe77317f1d2788627)
- update dependencies, migrate chrono to 0.4.23 [8be1992](https://github.com/ffplayout/ffplayout/commit/8be199222e82c69fa7bcf23c87511642aec7a156)
### frontend
- update dependencies
### Documentation
- fix api examples [#232](https://github.com/ffplayout/ffplayout/discussions/232); [#238](https://github.com/ffplayout/ffplayout/issues/238); [694c9f8](https://github.com/ffplayout/ffplayout/commit/694c9f8c4b75f5e1d3c219dfe77317f1d2788627); [8f84b70](https://github.com/ffplayout/ffplayout/commit/8f84b702057b4bcbd6103fe8f8d468f36c09ffa5)
- set EBU R128 loudness normalization again to experimental [f576ded](https://github.com/ffplayout/ffplayout/commit/f576dedcb9ebac259ec2283a622cd521a2f614b8)
- the audio quality is not very good and it is not recommended to use the filter if a good quality is desired
- maybe this function will be removed again in the future
## [0.16.3](https://github.com/ffplayout/ffplayout/compare/v0.16.2...v0.16.3) (2022-11-04)
### ffplayout
- escape characters in drawtext filter [76e26f0](https://github.com/ffplayout/ffplayout/pull/223/commits/76e26f0f704948371638308cb844ee560d679e62)
- revert to old audio codec settings [0e3b9e3](https://github.com/ffplayout/ffplayout/pull/223/commits/0e3b9e3f806f06177883226ebbe49097292df0c7)
- Somehow with s302m there is a smaller time delta. MP2 works in general, and also works better with the loudnorm filter, but s302m is uncompressed and time stays more in sync.
- expose audio channel layout to the config [#222](https://github.com/ffplayout/ffplayout/issues/222), [960280f](https://github.com/ffplayout/ffplayout/pull/223/commits/960280f1423d159fb8a4af79a14f97b35840f3a9)
- ignore muxed as a private data stream warning, validate channel count [6149288](https://github.com/ffplayout/ffplayout/pull/223/commits/6149288d2fbeef8d122c9e44b7420dc795f67d5b)
### Development
- fix cross compile for osx [5cbf5e7](https://github.com/ffplayout/ffplayout/pull/223/commits/5cbf5e7a4c20d9560dada978bad51a7556031b73)
## [0.16.2](https://github.com/ffplayout/ffplayout/compare/v0.16.1...v0.16.2) (2022-10-26)
### ffplayout
- ignore more ffmpeg errors and ignore them also on ingest server [2f8c2de](https://github.com/ffplayout/ffplayout/pull/221/commits/2f8c2deebc857c23f0bdc96ef977aaa174981fd3)
- update dependencies [bdf43f7](https://github.com/ffplayout/ffplayout/pull/221/commits/bdf43f7e6bd765ebb88afac7761a0a246b5cdfb4)
- fix null output, when is set per command line parameter [5b910d6](https://github.com/ffplayout/ffplayout/pull/221/commits/5b910d6e65d6cd1800fffe914a859a2b121be3cf)
- revert to video bitrate and mp2 audio codec [c326c3b](https://github.com/ffplayout/ffplayout/pull/221/commits/c326c3b61fdedf2cd4f609c74160ad5e3c470f43)
- When video bitrate is not fixed the delta delay is more unstable and can reach error threshold. Same is with audio codec pcm_bluray, maybe because it changes the format to m2ts. s302m would be best option, but is not working correctly with loudnorm filter.
- print version in debug level [241d8ee](https://github.com/ffplayout/ffplayout/pull/221/commits/241d8ee3f661f0c2585cd288a695cb5099b05677)
### Documentation
- add info for srt ingest [2f8c2de](https://github.com/ffplayout/ffplayout/pull/221/commits/2f8c2deebc857c23f0bdc96ef977aaa174981fd3)
## [0.16.1](https://github.com/ffplayout/ffplayout/compare/v0.16.0...v0.16.1) (2022-10-25)
### ffplayout
- rearrange custom filters (fix missing output mapping on multiple outputs) [9cb3a62](https://github.com/ffplayout/ffplayout/pull/217/commits/9cb3a6206938adcf1fbe4ce0ec763cad9e812c76)
- switch decoder audio codec to pcm_bluray [8b3a80f](https://github.com/ffplayout/ffplayout/pull/218/commits/8b3a80f5602eda240c6a59178c33886c9e81cb1d)
- deserialize drawtext message with struct object and add single quotes around values [1373182](https://github.com/ffplayout/ffplayout/pull/218/commits/1373182c2ad457d34bff449385e73203b9ba5791)
- update dependencies [a246a60](https://github.com/ffplayout/ffplayout/commit/a246a6018eb024cbeac11dd206b76eaffd7fd20c)
### Development
- fix deb and rpm bundle [79e4d5d](https://github.com/ffplayout/ffplayout/pull/217/commits/79e4d5dda05e715df96a38070466ea7a4c8378b2)
- add subtitle example [d0ef717](https://github.com/ffplayout/ffplayout/pull/217/commits/d0ef71767b2af7d6053aeb83e3a6906fb84c984c), [e72967a](https://github.com/ffplayout/ffplayout/pull/217/commits/e72967a21c14ee8c71e18085dc397740d3586d01)
- use NODE_OPTIONS for nodejs 18 [bcf212d](https://github.com/ffplayout/ffplayout/pull/218/commits/bcf212d8de6c2b87571e73cd73023af0e4b7941b)
## [0.16.0](https://github.com/ffplayout/ffplayout/compare/v0.15.2...v0.16.0) (2022-10-19)
### ffplayout
- add option to convert text/m3u file to playlist, fix [#195](https://github.com/ffplayout/ffplayout/issues/195), [69a3e59](https://github.com/ffplayout/ffplayout/commit/69a3e59e3548f082f68ef176acd7043ee0f06902)
- ignore some harmless ffmpeg errors [2ebb4c6](https://github.com/ffplayout/ffplayout/commit/2ebb4c6822e5721beedb3988fbe915c229ee2f20)
- only seek in when seek value is over 0.5 [9d094d9](https://github.com/ffplayout/ffplayout/commit/9d094d983878563960fb7fc222ce9877a583e4e9)
- use realtime video filter only [9d094d9](https://github.com/ffplayout/ffplayout/commit/9d094d983878563960fb7fc222ce9877a583e4e9)
- update dependencies
- add at least anull filter [dcc4616](https://github.com/ffplayout/ffplayout/commit/dcc461642169bf2c5db812c2a806e6d64baf8101)
- multi audio track support, fix [#158](https://github.com/ffplayout/ffplayout/issues/158) [#198](https://github.com/ffplayout/ffplayout/issues/198), [c85e550](https://github.com/ffplayout/ffplayout/commit/c85e5503b432f1c44fcbf11870d2dfc140c65db9)
- add filter type enum [1d11d36](https://github.com/ffplayout/ffplayout/commit/1d11d36ef9cccbdfe215adfe970e8c4219774227)
- switch most integers to i32 [c3b5762](https://github.com/ffplayout/ffplayout/commit/c3b57622bbc19e55d203b5ee66b76ac3307fef10)
- fix wrong log message in HLS mode: Decoder -> Encoder [8a5889b](https://github.com/ffplayout/ffplayout/commit/8a5889be3710e92d88c4ad4815cf5805a77f84c9)
- wait for ffmpeg in validation process to be closed, fixed system zombies [8fe7b87](https://github.com/ffplayout/ffplayout/commit/8fe7b87644b5216b3a39b21264d2246ec610ee10)
- add tests, mostly input and output parameter tests [87c508b](https://github.com/ffplayout/ffplayout/commit/87c508be541cacbbae5d9efedfb903506e573ad5)
- add test files [87c508b](https://github.com/ffplayout/ffplayout/commit/87c508be541cacbbae5d9efedfb903506e573ad5)
- add ProcessMode enum [61f57e2](https://github.com/ffplayout/ffplayout/commit/61f57e2f9e0498d2939f57fade0daf2efbdc2824)
- multi audio outputs [06b5d6a](https://github.com/ffplayout/ffplayout/commit/06b5d6a2275f286f165d173b834f92e18e0514ac)
- fix case when video has no audio, but separate audio is set [a93440e](https://github.com/ffplayout/ffplayout/commit/a93440e06b4533689beae4dd6b07767db300757a)
- allow loudnorm on ingest only [69b6207](https://github.com/ffplayout/ffplayout/commit/69b62071656c3d4a3ab8b0f84341c1f584d47e40)
- use named drawtext filter instead of getting its index [84addbc](https://github.com/ffplayout/ffplayout/commit/84addbcb2a21725f2de34d2b4602ee95f1753311)
- use filters struct for stream encoder [096c018f](https://github.com/ffplayout/ffplayout/commit/096c018fe38a0653c1dfc279775b7131584f5463)
- unify null output [31b72db](https://github.com/ffplayout/ffplayout/commit/31b72db10640a6508ab50eca43625f04c26f2030)
- build output filters from scratch, fix [#210](https://github.com/ffplayout/ffplayout/issues/210), [09dace9](https://github.com/ffplayout/ffplayout/commit/09dace92f4100aecfc92ad7df06f1e8b7174f690)
- simplify prepare_output_cmd [4afba402](https://github.com/ffplayout/ffplayout/commit/4afba4028aad488d404db9b09bac3166d7f33917)
- validate config regex
### ffpapi
- restructure api [ec4f5d2](https://github.com/ffplayout/ffplayout/commit/ec4f5d2ac23718aa6c3fc23f698f34a2e31b326b)
- import playlist from text file [#195](https://github.com/ffplayout/ffplayout/issues/195), [ec4f5d2](https://github.com/ffplayout/ffplayout/commit/ec4f5d2ac23718aa6c3fc23f698f34a2e31b326b)
### frontend
- style scrollbar on chrome browser [8be260a](https://github.com/ffplayout/ffplayout/commit/8be260ae207d33487f51ebd8f98eb26e16298bdb)
### Documentation
- add import example
- add new import cli parameter
- add doc for multiple audio outputs
- add info about experimental features
### Development
- use ffmpeg in action
- run tests only on Linux
## [0.15.0](https://github.com/ffplayout/ffplayout/compare/v0.15.0...v0.15.2) (2022-09-02)
### ffplayout
- validate file compression settings and filtering [9c51226](https://github.com/ffplayout/ffplayout/commit/9c5122696dc9065ff670c54abd0f87945b8865e1)
- fix length from filler clip in playlist generator [9c51226](https://github.com/ffplayout/ffplayout/commit/9c5122696dc9065ff670c54abd0f87945b8865e1)
- serialize values only when string is not empty [9c51226](https://github.com/ffplayout/ffplayout/commit/9c5122696dc9065ff670c54abd0f87945b8865e1)
- compare also audio and custom filter on playlist existing check [9c51226](https://github.com/ffplayout/ffplayout/commit/9c5122696dc9065ff670c54abd0f87945b8865e1)
- stop only when error comes not from hls segment deletion [a62c1d0](https://github.com/ffplayout/ffplayout/commit/a62c1d07c7e4f62ccd3e4158f6b5f50ee76a67cc)
- fix unwrap error on None output_cmd [7cd8789](https://github.com/ffplayout/ffplayout/commit/7cd87896a46833996986166dff7f89421b5cfb2d)
### ffpapi
- get UTC offset from system [6ff34e0](https://github.com/ffplayout/ffplayout/commit/6ff34e0ddb1940aeb7b69e4d6b6f35b348a6f541)
### frontend
- get UTC offset from API, fix [#182](https://github.com/ffplayout/ffplayout/issues/182)
- fix bugs related to time and playlist save [03aa2f3](https://github.com/ffplayout/ffplayout/commit/03aa2f3b01a79c93f650eeba6830be85d1293fec)
- add edit button to playlist items [03aa2f3](https://github.com/ffplayout/ffplayout/commit/03aa2f3b01a79c93f650eeba6830be85d1293fec)
- add custom filter to playlist item [03aa2f3](https://github.com/ffplayout/ffplayout/commit/03aa2f3b01a79c93f650eeba6830be85d1293fec)
- better responsive control [46140b4](https://github.com/ffplayout/ffplayout/commit/46140b42839485a37127a7add8818b7f6abf8417)
- remove perfect-scrollbar (use only browser scrollbar)
- fix logout button in menu
- remove escape character
- fix browser errors when engine is not running
### Dokumentation
- Fix spelling in Readme
- Add filtergraph/pipeline description
- Add complex custom filter example

View File

@ -1,52 +1,26 @@
## Contribute to ffplayout How to contribute to ffplayout engine
-----
### Report a bug #### Did you need general help?
- Check issues if the bug was already reported. - Search in all issues if your question was already asked.
- When this bug was not reported, please use the **bug report** template. - Please give a detailed explanation of your problem and your goal.
* try to fill out every step - Give as much information as you can, like:
* use code blocks for config, log and command line parameters 1. which system you are using
* text from config and logging is preferred over screenshots 2. python version
3. ffmpeg version and libs
4. your ffplayout.yml config file
5. your log files
- Ask your question in a way, that we don't need to ask for more details and background.
### Ask for help #### Did you found a bug?
When something is not working, feel free to ask your question under [discussions](https://github.com/ffplayout/ffplayout/discussions/categories/q-a). But please make some effort, so it is easier to help. Please don't open a discussion in a "WhatsApp style", with only one line of text. As a general rule of thumb, answer these points: Try the master branch first; if this bug still exists there, use the **Bug Report** issue template and fill out everything.
- what do you want to achieve? #### You have a feature request?
- what have you already tried?
- have you looked at the help documents under [/docs/](/docs)?
- what exactly is not working?
- relevant logging output
- current configuration (ffplayout.yml)
#### Sharing Screenshots Please use the **Feature Request** issue template and fill up everything.
All kinds of logging and terminal outputs please share in a code block that is surrounded by **```**. #### You want to make a pull request?
That is wonderful! But please use the same code style. This project tries to be PEP8 conform.
When something is wrong in the frontend you can also share as a screenshot/screen record, but please share them with English language selected. If you add new functions, create also a [test](https://github.com/ffplayout/ffplayout_engine/tree/master/tests) for it.
#### Sample files
If playout works normally on your system with the [provided test clips](https://github.com/ffplayout/ffplayout/tree/master/tests/assets/media_sorted), but your files produce errors and you are sure that the problem is related to ffplayout, you can provide a test file under these conditions:
- ffmpeg can process the file normally.
- The file is not too large, a few seconds should be enough.
- The video doesn't contain any illegal content.
- You have legal permission to distribute the file.
- The content is not age restricted (no violent or sexual content).
### Feature request
You can ask for features, but it cannot be guaranteed that they will find their way into the code base. Try to think about whether your idea is useful for others too, and describe it in an understandable way. If your idea is accepted, it can take time until it is applied. In general stability takes precedence over features, and when a new version has just arrived, it can take time to prove itself in production.
### Create a pull request
In general pull requests are very welcome! But please don't create features which are too specific and help only your use case and no one else. If you are not sure, better ask before you start.
Please also follow the code style from this project, and before you create your pull request check your code with:
```BASH
cargo fmt --all -- --check
cargo clippy --all-features --all-targets -- --deny warnings
```
For bigger changes and complex new functions a test is required.

4664
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,17 +0,0 @@
# Cargo workspace manifest for the ffplayout project.
[workspace]
# Member crates: the playout engine and the integration-test crate.
members = ["engine", "tests"]
resolver = "2"
# Shared package metadata, inherited by member crates via `key.workspace = true`.
[workspace.package]
description = "24/7 playout based on rust and ffmpeg"
readme = "README.md"
version = "0.24.0"
license = "GPL-3.0"
repository = "https://github.com/ffplayout/ffplayout"
authors = ["Jonathan Baecker <jonbae77@gmail.com>"]
edition = "2021"
# Release profile: full optimization, stripped symbols and LTO
# for a smaller, faster binary.
[profile.release]
opt-level = 3
strip = true
lto = true

View File

@ -1,41 +0,0 @@
# cross-rs configuration: each cross-compilation target pre-installs
# Node.js 20 inside the build container, presumably so the web frontend
# can be built during the cross build — TODO confirm against build scripts.
[target.x86_64-unknown-linux-musl]
pre-build = [
"apt-get update",
"apt-get --assume-yes install curl",
"curl -fsSL https://deb.nodesource.com/setup_20.x | bash -",
"apt-get --assume-yes install nodejs"
]
[target.aarch64-unknown-linux-gnu]
pre-build = [
"apt-get update",
"apt-get --assume-yes install curl",
"curl -fsSL https://deb.nodesource.com/setup_20.x | bash -",
"apt-get --assume-yes install nodejs"
]
[target.x86_64-pc-windows-gnu]
pre-build = [
"apt-get update",
"apt-get --assume-yes install curl",
"curl -fsSL https://deb.nodesource.com/setup_20.x | bash -",
"apt-get --assume-yes install nodejs"
]
# Apple targets require locally built cross images (`:local` tags);
# they are not published on the cross-rs registry.
[target.x86_64-apple-darwin]
image = "ghcr.io/cross-rs/x86_64-apple-darwin-cross:local"
pre-build = [
"apt-get update",
"apt-get --assume-yes install curl",
"curl -fsSL https://deb.nodesource.com/setup_20.x | bash -",
"apt-get --assume-yes install nodejs"
]
[target.aarch64-apple-darwin]
image = "ghcr.io/cross-rs/aarch64-apple-darwin-cross:local"
pre-build = [
"apt-get update",
"apt-get --assume-yes install curl",
"curl -fsSL https://deb.nodesource.com/setup_20.x | bash -",
"apt-get --assume-yes install nodejs"
]

157
README.md
View File

@ -1,34 +1,40 @@
**ffplayout** **ffplayout_engine**
================ ================
[![made-with-python](https://img.shields.io/badge/Made%20with-Python-1f425f.svg)](https://www.python.org/)
[![License: GPL v3](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0) [![License: GPL v3](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0)
![player](/docs/images/player.png) ## Attention: Version 4.0 will be the last release in Python
[ffplayout](/ffplayout-engine/README.md) is a 24/7 broadcasting solution. It can playout a folder containing audio or video clips, or play a *JSON* playlist for each day, keeping the current playlist editable. After that the code base will be changed to Rust.
The ffplayout applications are mostly designed to run as system services on Linux. But in general they should run on any platform supported by Rust. -----
Check the [releases](https://github.com/ffplayout/ffplayout/releases/latest) for pre compiled version. The purpose with ffplayout is to provide a 24/7 broadcasting solution that plays a *json* playlist for every day, while keeping the current playlist editable.
### Features **Check [ffplayout-frontend](https://github.com/ffplayout/ffplayout-frontend): web-based GUI for ffplayout**
- start program with [web based frontend](/frontend/), or run playout in foreground mode without frontend **Features**
-----
- have all values in a separate config file
- dynamic playlist - dynamic playlist
- replace missing playlist or clip with single filler or multiple fillers from folder, if no filler exists, create dummy clip - replace missing playlist or clip with a dummy clip
- playing clips in [watched](/docs/folder_mode.md) folder mode - playing clips from [watched folder](https://github.com/ffplayout/ffplayout_engine/wiki/Watch-Folder)
- send emails with error message - send emails with error message
- overlay a logo - overlay a logo
- overlay text, controllable through [web frontend](/frontend/) (needs ffmpeg with libzmq and enabled JSON RPC server) - overlay text, controllable through [messenger](https://github.com/ffplayout/messenger) or [ffplayout-frontend](https://github.com/ffplayout/ffplayout-frontend) (needs ffmpeg with libzmq)
- **EBU R128 loudness** normalization (single pass) (experimental)
- loop clip in playlist which `out` value is higher then its `duration`, see also [Loop Clip](https://github.com/ffplayout/ffplayout_engine/wiki/Loop-Clip)
- loop playlist infinitely - loop playlist infinitely
- [remote source](/docs/remote_source.md)
- trim and fade the last clip, to get full 24 hours - trim and fade the last clip, to get full 24 hours
- when playlist is not 24 hours long, loop fillers until time is full - when playlist is not 24 hours long, loop filler clip until time is full
- set custom day start, so you can have playlist for example: from 6am to 6am, instate of 0am to 12pm - set custom day start, so you can have playlist for example: from 6am to 6am, instate of 0am to 12pm
- normal system requirements and no special tools - normal system requirements and no special tools
- no GPU power is needed - no GPU power is needed
- stream to server or play on desktop - stream to server or play on desktop
- log to files or color output to console - on posix systems ffplayout can reload config with *SIGHUP*
- logging to files, or colored output to console
- add filters to input, if is necessary to match output stream: - add filters to input, if is necessary to match output stream:
- **yadif** (deinterlacing) - **yadif** (deinterlacing)
- **pad** (letterbox or pillarbox to fit aspect) - **pad** (letterbox or pillarbox to fit aspect)
@ -37,40 +43,31 @@ Check the [releases](https://github.com/ffplayout/ffplayout/releases/latest) for
- **aevalsrc** (if video have no audio) - **aevalsrc** (if video have no audio)
- **apad** (add silence if audio duration is to short) - **apad** (add silence if audio duration is to short)
- **tpad** (add black frames if video duration is to short) - **tpad** (add black frames if video duration is to short)
- [output](/docs/output.md): - Live ingest (experimental)
- add custom [filters](https://github.com/ffplayout/ffplayout_engine/tree/master/ffplayout/filters)
- add custom [arguments](https://github.com/ffplayout/ffplayout_engine/tree/master/ffplayout/conf.d)
- different [play modes](https://github.com/ffplayout/ffplayout_engine/tree/master/ffplayout/player):
- different types of [output](https://github.com/ffplayout/ffplayout_engine/tree/master/ffplayout/output):
- **stream** - **stream**
- **desktop** - **desktop**
- **HLS** - **live_switch**
- **null** (for debugging) - **hls**
- [live ingest](/docs/live_ingest.md) - **custom**
- image source (will loop until out duration is reached) - Multi channel
- extra audio source, has priority over audio from video (experimental *)
- [multiple audio tracks](/docs/multi_audio.md) (experimental *)
- [Stream Copy](/docs/stream_copy.md) mode (experimental *)
- [custom filters](/docs/custom_filters.md) globally in config, or in playlist for specific clips
- import playlist from text or m3u file, with CLI or frontend
- audio only, for radio mode (experimental *)
- generate playlist based on [template](/docs/playlist_gen.md) (experimental *)
- During playlist import, all video clips are validated and, if desired, checked to ensure that the audio track is not completely muted.
- run multiple channels (experimental *)
For preview stream, read: [/docs/preview_stream.md](/docs/preview_stream.md)
**\* Experimental features do not guarantee the same stability and may fail under unusual circumstances. Code and configuration options may change in the future.**
### Requirements
- RAM and CPU depends on video resolution, minimum 4 _dedicated_ threads and 3GB RAM for 720p are recommend
- **ffmpeg** v5.0+ and **ffprobe** (**ffplay** if you want to play on desktop)
- if you want to overlay dynamic text, ffmpeg needs to have **libzmq**
### Install
Check [install](docs/install.md) for details about how to install ffplayout.
Requirements
----- -----
### JSON Playlist Example - python version 3.7+, dev version 3.9
- python module **watchdog** (only for folder mode)
- python module **colorama** if you are on windows
- python modules **PyYAML**, **requests**, **supervisor**
- **ffmpeg v4.2+** and **ffprobe** (**ffplay** if you want to play on desktop)
- if you want to overlay text, ffmpeg needs to have **libzmq**
- RAM and CPU depends on video resolution, minimum 4 threads and 3GB RAM for 720p are recommend
JSON Playlist Example
-----
```json ```json
{ {
@ -81,55 +78,77 @@ Check [install](docs/install.md) for details about how to install ffplayout.
"out": 647.68, "out": 647.68,
"duration": 647.68, "duration": 647.68,
"source": "/Media/clip1.mp4" "source": "/Media/clip1.mp4"
}, {
"in": 0,
"out": 890.02,
"duration": 890.02,
"source": "/Media/clip2.mp4",
"custom_filter": "eq=gamma_b=0.6:gamma_g=0.7[c_v_out]"
}, { }, {
"in": 0, "in": 0,
"out": 149, "out": 149,
"duration": 149, "duration": 149,
"source": "/Media/clip3.mp4", "source": "/Media/clip2.mp4"
"category": "advertisement"
}, { }, {
"in": 0, "in": 0,
"out": 114.72, "out": 114.72,
"duration": 114.72, "duration": 114.72,
"source": "/Media/image1.jpg", "source": "/Media/clip3.mp4",
}, { "category": "advertisement"
"in": 0,
"out": 230.30,
"duration": 230.30,
"source": "/Media/image2.jpg",
"audio": "/Media/audio1.mp3"
}, { }, {
"in": 0, "in": 0,
"out": 2531.36, "out": 2531.36,
"duration": 2531.36, "duration": 2531.36,
"source": "https://example.org/big_buck_bunny.webm", "source": "/Media/clip4.mp4",
"category": "" "category": ""
} }
] ]
} }
``` ```
If you are in playlist mode and move backwards or forwards in time, the time shift is saved so the playlist is still in sync. Bear in mind, however, that this may make your playlist too short. If you do not reset it, it will automatically reset the next day.
## **Warning** **If you need a simple playlist generator check:** [playlist-generator](https://github.com/ffplayout/playlist-generator)
(Endless) streaming over multiple days will only work if config has a **day_start** value and the **length** value is **24 hours**. If you only need a few hours for each day, use a *cron* job or something similar. The playlist can be extend, to use custom attributes in your [filters](/ffplayout/filters/).
## Note
This project includes the DejaVu font, which are licensed under the [Bitstream Vera Fonts License](/assets/FONT_LICENSE.txt).
**Warning**
----- -----
## Sponsoring (Endless) streaming over multiple days will only work when config have **day_start** value and the **length** value is **24 hours**. If you need only some hours for every day, use a *cron* job, or something similar.
If you like this project and would like to make a donation, please use one of the options provided. Remote source from URL
Please note that donations are not intended to get support or features! Donations are only a sign of appreciation. -----
### Backers You can use sources from remote URL in that way:
[![](https://opencollective.com/ffplayout/backers.svg?width=800&button=true)](https://opencollective.com/ffplayout) ```json
{
"in": 0,
"out": 149,
"duration": 149,
"source": "https://example.org/big_buck_bunny.webm"
}
```
But be careful with it, better test it multiple times!
More informations in [Wiki](https://github.com/ffplayout/ffplayout_engine/wiki/Remote-URL-Source)
Installation
-----
Check [INSTALL.md](docs/INSTALL.md)
Start with Arguments
-----
ffplayout also allows the passing of parameters:
- `-c, --config` use given config file
- `-f, --folder` use folder for playing
- `-l, --log` for user-defined log path, *none* for console output
- `-i, --loop` loop playlist infinitely
- `-o, --output` set output mode: **desktop**, **hls**, **stream**, ...
- `-p, --playlist` for playlist file
- `-s, --start` set start time in *hh:mm:ss*, *now* for start at playlist begin
- `-t, --length` set length in *hh:mm:ss*, *none* for no length check
- `-pm, --play_mode` playing mode: folder, playlist, custom...
You can run the command like:
```SHELL
./ffplayout.py -l none -p ~/playlist.json -s now -t none -o desktop
```

Binary file not shown.

View File

@ -1,187 +0,0 @@
Fonts are (c) Bitstream (see below). DejaVu changes are in public domain.
Glyphs imported from Arev fonts are (c) Tavmjong Bah (see below)
Bitstream Vera Fonts Copyright
------------------------------
Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Bitstream Vera is
a trademark of Bitstream, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of the fonts accompanying this license ("Fonts") and associated
documentation files (the "Font Software"), to reproduce and distribute the
Font Software, including without limitation the rights to use, copy, merge,
publish, distribute, and/or sell copies of the Font Software, and to permit
persons to whom the Font Software is furnished to do so, subject to the
following conditions:
The above copyright and trademark notices and this permission notice shall
be included in all copies of one or more of the Font Software typefaces.
The Font Software may be modified, altered, or added to, and in particular
the designs of glyphs or characters in the Fonts may be modified and
additional glyphs or characters may be added to the Fonts, only if the fonts
are renamed to names not containing either the words "Bitstream" or the word
"Vera".
This License becomes null and void to the extent applicable to Fonts or Font
Software that has been modified and is distributed under the "Bitstream
Vera" names.
The Font Software may be sold as part of a larger software package but no
copy of one or more of the Font Software typefaces may be sold by itself.
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT,
TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL BITSTREAM OR THE GNOME
FOUNDATION BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING
ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE
FONT SOFTWARE.
Except as contained in this notice, the names of Gnome, the Gnome
Foundation, and Bitstream Inc., shall not be used in advertising or
otherwise to promote the sale, use or other dealings in this Font Software
without prior written authorization from the Gnome Foundation or Bitstream
Inc., respectively. For further information, contact: fonts at gnome dot
org.
Arev Fonts Copyright
------------------------------
Copyright (c) 2006 by Tavmjong Bah. All Rights Reserved.
Permission is hereby granted, free of charge, to any person obtaining
a copy of the fonts accompanying this license ("Fonts") and
associated documentation files (the "Font Software"), to reproduce
and distribute the modifications to the Bitstream Vera Font Software,
including without limitation the rights to use, copy, merge, publish,
distribute, and/or sell copies of the Font Software, and to permit
persons to whom the Font Software is furnished to do so, subject to
the following conditions:
The above copyright and trademark notices and this permission notice
shall be included in all copies of one or more of the Font Software
typefaces.
The Font Software may be modified, altered, or added to, and in
particular the designs of glyphs or characters in the Fonts may be
modified and additional glyphs or characters may be added to the
Fonts, only if the fonts are renamed to names not containing either
the words "Tavmjong Bah" or the word "Arev".
This License becomes null and void to the extent applicable to Fonts
or Font Software that has been modified and is distributed under the
"Tavmjong Bah Arev" names.
The Font Software may be sold as part of a larger software package but
no copy of one or more of the Font Software typefaces may be sold by
itself.
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL
TAVMJONG BAH BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
OTHER DEALINGS IN THE FONT SOFTWARE.
Except as contained in this notice, the name of Tavmjong Bah shall not
be used in advertising or otherwise to promote the sale, use or other
dealings in this Font Software without prior written authorization
from Tavmjong Bah. For further information, contact: tavmjong @ free
. fr.
TeX Gyre DJV Math
-----------------
Fonts are (c) Bitstream (see below). DejaVu changes are in public domain.
Math extensions done by B. Jackowski, P. Strzelczyk and P. Pianowski
(on behalf of TeX users groups) are in public domain.
Letters imported from Euler Fraktur from AMSfonts are (c) American
Mathematical Society (see below).
Bitstream Vera Fonts Copyright
Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Bitstream Vera
is a trademark of Bitstream, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of the fonts accompanying this license (“Fonts”) and associated
documentation
files (the “Font Software”), to reproduce and distribute the Font Software,
including without limitation the rights to use, copy, merge, publish,
distribute,
and/or sell copies of the Font Software, and to permit persons to whom
the Font Software is furnished to do so, subject to the following
conditions:
The above copyright and trademark notices and this permission notice
shall be
included in all copies of one or more of the Font Software typefaces.
The Font Software may be modified, altered, or added to, and in particular
the designs of glyphs or characters in the Fonts may be modified and
additional
glyphs or characters may be added to the Fonts, only if the fonts are
renamed
to names not containing either the words “Bitstream” or the word “Vera”.
This License becomes null and void to the extent applicable to Fonts or
Font Software
that has been modified and is distributed under the “Bitstream Vera”
names.
The Font Software may be sold as part of a larger software package but
no copy
of one or more of the Font Software typefaces may be sold by itself.
THE FONT SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT,
TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL BITSTREAM OR THE GNOME
FOUNDATION
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL,
SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN
ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR
INABILITY TO USE
THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.
Except as contained in this notice, the names of GNOME, the GNOME
Foundation,
and Bitstream Inc., shall not be used in advertising or otherwise to promote
the sale, use or other dealings in this Font Software without prior written
authorization from the GNOME Foundation or Bitstream Inc., respectively.
For further information, contact: fonts at gnome dot org.
AMSFonts (v. 2.2) copyright
The PostScript Type 1 implementation of the AMSFonts produced by and
previously distributed by Blue Sky Research and Y&Y, Inc. are now freely
available for general use. This has been accomplished through the
cooperation
of a consortium of scientific publishers with Blue Sky Research and Y&Y.
Members of this consortium include:
Elsevier Science IBM Corporation Society for Industrial and Applied
Mathematics (SIAM) Springer-Verlag American Mathematical Society (AMS)
In order to assure the authenticity of these fonts, copyright will be
held by
the American Mathematical Society. This is not meant to restrict in any way
the legitimate use of the fonts, such as (but not limited to) electronic
distribution of documents containing these fonts, inclusion of these fonts
into other public domain or commercial font collections or computer
applications, use of the outline data to create derivative fonts and/or
faces, etc. However, the AMS does require that the AMS copyright notice be
removed from any derivative versions of the fonts which have been altered in
any way. In addition, to ensure the fidelity of TeX documents using Computer
Modern fonts, Professor Donald Knuth, creator of the Computer Modern faces,
has requested that any alterations which yield different font metrics be
given a different name.
$Id$

View File

@ -1 +0,0 @@
WEBVTT

View File

@ -1,47 +0,0 @@
# Nginx reverse-proxy vhost for ffplayout, forwarding to the
# backend listening on 127.0.0.1:8787.
server {
listen 80;
server_name ffplayout.local;
# Compress common text responses.
gzip on;
gzip_types text/plain application/xml text/css application/javascript;
gzip_min_length 1000;
charset utf-8;
# Large limit to allow big media uploads through the proxy.
client_max_body_size 7000M; # should be desirable value
# Basic security headers.
add_header X-Frame-Options SAMEORIGIN;
add_header X-Content-Type-Options nosniff;
add_header X-XSS-Protection "1; mode=block";
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains; preload" always;
# Main application: very long timeouts, presumably for long-running
# uploads/streams — confirm before lowering.
location / {
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_read_timeout 36000s;
proxy_connect_timeout 36000s;
proxy_send_timeout 36000s;
proxy_buffer_size 128k;
proxy_buffers 4 256k;
proxy_busy_buffers_size 256k;
send_timeout 36000s;
proxy_pass http://127.0.0.1:8787;
}
# /data endpoint: HTTP/1.1 with cleared Connection header,
# as needed for server-sent events / streaming responses.
location /data {
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Connection "";
proxy_http_version 1.1;
proxy_pass http://127.0.0.1:8787/data;
}
# HLS segments are served directly from disk, bypassing the backend.
location /live/ {
alias /usr/share/ffplayout/public/live/;
}
}

View File

@ -1,14 +0,0 @@
# systemd unit: runs ffplayout as a service bound on all interfaces, port 8787.
[Unit]
Description=Rust and ffmpeg based playout solution
After=network.target remote-fs.target
[Service]
ExecStart=/usr/bin/ffplayout -l 0.0.0.0:8787
# Restart aggressively (1s delay) to keep the 24/7 playout alive.
Restart=always
StartLimitInterval=20
RestartSec=1
# mixed: SIGTERM to the main process, SIGKILL to the remaining group
# (child ffmpeg processes).
KillMode=mixed
# Runs as the dedicated ffpu system user created by the package postinst.
User=ffpu
[Install]
WantedBy=multi-user.target

28
debian/postinst vendored
View File

@ -1,28 +0,0 @@
#!/bin/sh
# Debian maintainer script (postinst): creates the ffpu system user and
# the directory layout ffplayout expects, then fixes ownership.
#DEBHELPER#
sysUser="ffpu"
# Inside a container we run everything as root instead of a dedicated user.
if [ -f /run/.containerenv ] || [ -f /run/.dockerenv ] || [ -f /.dockerenv ] || [ -f /.dockerinit ]; then
sysUser="root"
fi
# Create the user only if `id -u` fails (echo -1 makes the -ge 0 test false).
# NOTE(review): the command substitution is unquoted; works for these fixed
# values but fragile — confirm before reusing elsewhere.
if [ ! $(id -u $sysUser 2>/dev/null || echo -1) -ge 0 ]; then
adduser --system --create-home $sysUser > /dev/null || adduser --system --home "/home/$sysUser" $sysUser
fi
# First install: create database, public HLS, playlist and media directories.
if [ ! -d "/usr/share/ffplayout/db" ]; then
mkdir "/usr/share/ffplayout/db"
mkdir -p "/usr/share/ffplayout/public/live"
mkdir -p "/var/lib/ffplayout/playlists"
mkdir -p "/var/lib/ffplayout/tv-media"
chown -R ${sysUser}: "/usr/share/ffplayout"
chown -R ${sysUser}: "/var/lib/ffplayout"
fi
# Log directory, owned by the service user.
if [ ! -d "/var/log/ffplayout" ]; then
mkdir "/var/log/ffplayout"
chown ${sysUser}: "/var/log/ffplayout"
fi

23
debian/postrm vendored
View File

@ -1,23 +0,0 @@
#!/bin/sh
# Debian maintainer script (postrm): cleans up the ffpu user and data
# directories depending on how the package is being removed.
#DEBHELPER#
sysUser="ffpu"
case "$1" in
# Full purge (or aborted install): remove user, data, logs and home dir.
abort-install|purge)
deluser $sysUser
rm -rf /usr/share/ffplayout /var/log/ffplayout /var/lib/ffplayout /home/$sysUser
;;
# Plain remove: only drop logs, keep database/media for reinstallation.
remove)
rm -rf /var/log/ffplayout
;;
upgrade|failed-upgrade|abort-upgrade|disappear)
;;
*)
echo "postrm called with unknown argument \`$1'" >&2
exit 1
;;
esac

View File

@ -1,38 +0,0 @@
# ffplayout container image based on Alpine with the distro ffmpeg.
# Uses a locally provided release tarball when present, otherwise
# downloads the matching GitHub release.
FROM alpine:latest
ARG FFPLAYOUT_VERSION=0.24.0-rc3
ARG SHARED_STORAGE=false
ENV DB=/db
ENV SHARED_STORAGE=${SHARED_STORAGE}
# README.md is always copied so the COPY never fails when no tarball exists.
COPY README.md ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.* /tmp/
# Entrypoint script: initialize the database with default admin/admin
# credentials on first start, then run the server on port 8787.
COPY <<-EOT /run.sh
#!/bin/sh
if [ ! -f /db/ffplayout.db ]; then
ffplayout -i -u admin -p admin -m contact@example.com --storage "/tv-media" --playlists "/playlists" --public "/public" --logs "/logging" --mail-smtp "mail.example.org" --mail-user "admin@example.org" --mail-password "" --mail-starttls
fi
/usr/bin/ffplayout -l "0.0.0.0:8787"
EOT
RUN apk update && \
apk upgrade && \
apk add --no-cache ffmpeg sqlite font-dejavu && \
chmod +x /run.sh
# NOTE(review): `[[ ... ]]` is bash syntax; busybox ash accepts it, but
# confirm if the base image or RUN shell ever changes.
RUN [[ -f "/tmp/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" ]] || \
wget -q "https://github.com/ffplayout/ffplayout/releases/download/v${FFPLAYOUT_VERSION}/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" -P /tmp/ && \
cd /tmp && \
tar xf "ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" && \
cp ffplayout /usr/bin/ && \
mkdir -p /usr/share/ffplayout/ && \
cp assets/dummy.vtt assets/logo.png assets/DejaVuSans.ttf assets/FONT_LICENSE.txt /usr/share/ffplayout/ && \
rm -rf /tmp/* && \
mkdir ${DB}
EXPOSE 8787
CMD ["/run.sh"]

View File

@ -1,177 +0,0 @@
# Run ffplayout in container
The image is build with a default user/pass `admin/admin`.
You can take a look at the [Dockerfile](Dockerfile)
### /!\ as ffmpeg is compiled with `--enable-nonfree` don't push it to a public registry nor distribute the image /!\
## Storage
There are some folders/files that are important for ffplayout to work well such as:
- **/usr/share/ffplayout/db** => where all the data are stored (user/pass etc)
- **/var/lib/ffplayout/tv-media** => where the media are stored by default (configurable)
- **/var/lib/ffplayout/playlists** => where playlists are stored (configurable)
It may be useful to create/link volumes for those folders/files.
## Docker
How to build the image:\
```BASH
# build default
docker build -t ffplayout-image .
# build from root folder, to copy *.tar.gz with self compiled binary
docker build -f docker/Dockerfile -t ffplayout-image .
# build ffmpeg from source
docker build -f ffmpeg.Dockerfile -t ffmpeg-build .
docker build -f nonfree.Dockerfile -t ffplayout-image:nonfree .
# build with nvidia image for hardware support
docker build -f nvidia.Dockerfile -t ffplayout-image:nvidia .
```
example of command to start the container:
```BASH
docker run -it -v /path/to/db:/db -v /path/to/storage:/tv-media -v /path/to/playlists:/playlists -v /path/to/public:/public -v /path/to/logging:/logging --name ffplayout -p 8787:8787 ffplayout-image
# run in daemon mode
docker run -d --name ffplayout -p 8787:8787 ffplayout-image
# run with docker-compose
docker-compose up -d
```
For setup mail server settings run:
```
docker exec -it ffplayout ffplayout -i
```
Then restart Container
#### Note from CentOS docker hub page
There have been reports that if you're using an Ubuntu host, you will need to add `-v /tmp/$(mktemp -d):/run` to the mount.
## Kubernetes
basic example to run the service in k8s:
```
---
apiVersion: apps/v1
kind: Deployment
metadata:
labels:
app: ffplayout
name: ffplayout
namespace: ffplayout
spec:
replicas: 1
selector:
matchLabels:
app: ffplayout
strategy:
type: Recreate
template:
metadata:
labels:
app: ffplayout
spec:
containers:
- name: ffplayout
securityContext:
allowPrivilegeEscalation: true
capabilities:
add:
- SYS_ADMIN
image: ffplayout-image:latest
ports:
- containerPort: 8787
name: web
protocol: TCP
volumeMounts:
- name: cgroup
mountPath: /sys/fs/cgroup
readOnly: true
- name: database-volume
mountPath: /usr/share/ffplayout/db
restartPolicy: Always
volumes:
- name: cgroup
hostPath:
path: '/sys/fs/cgroup'
type: Directory
- name: database-volume
ephemeral:
volumeClaimTemplate:
metadata:
labels:
type: my-database-volume
spec:
accessModes: [ "ReadWriteOnce" ]
storageClassName: "database-storage-class"
resources:
requests:
storage: 1Gi
```
### Use with traefik
If you are using traefik here is a sample config
```
---
kind: Service
apiVersion: v1
metadata:
name: ffplayout
namespace: ffplayout
spec:
ports:
- port: 8787
name: web
protocol: TCP
selector:
app: ffplayout
---
apiVersion: traefik.containo.us/v1alpha1
kind: IngressRoute
metadata:
name: ffplayout-http
namespace: ffplayout
spec:
entryPoints:
- web
routes:
- match: Host(`ffplayout.example.com`) && PathPrefix(`/`)
kind: Rule
middlewares:
- name: redirect-https
namespace: default
services:
- name: ffplayout
namespace: ffplayout
port: 8787
---
apiVersion: traefik.containo.us/v1alpha1
kind: IngressRoute
metadata:
name: ffplayout-https
namespace: ffplayout
spec:
entryPoints:
- websecure
routes:
- match: Host(`ffplayout.example.com`) && PathPrefix(`/`)
kind: Rule
services:
- name: ffplayout
namespace: ffplayout
port: 8787
tls:
certResolver: yourCert
```

View File

@ -1,16 +0,0 @@
# docker-compose service definition for ffplayout.
# NOTE(review): indentation appears stripped in this extract — restore
# standard YAML nesting before use.
version: '3'
services:
ffplayout:
container_name: ffplayout
build:
context: .
dockerfile: ./Dockerfile
# Persist database, media, playlists, logs and public HLS output on the host.
volumes:
- ./data/db:/db
- ./data/storage:/tv-media
- ./data/playlists:/playlists
- ./data/logging:/logging
- ./data/public:/public
ports:
- '8787:8787'

View File

@ -1,158 +0,0 @@
# Builder stage: compiles a fully static ffmpeg/ffprobe on Alpine with a
# wide codec selection (x264/x265, AV1, VP9, AAC, Opus, SRT, libzmq, ...).
FROM alpine:latest as builder
# Static-linking flags: everything is built into /tmp/local and linked
# with `-static`. CC is clang.
ENV EXTRA_CFLAGS=-march=generic \
LOCALBUILDDIR=/tmp/build \
LOCALDESTDIR=/tmp/local \
PKG_CONFIG="pkg-config --static" \
PKG_CONFIG_PATH=/tmp/local/lib/pkgconfig \
CPPFLAGS="-I/tmp/local/include -O3 -fno-strict-overflow -fstack-protector-all -fPIC" \
CFLAGS="-I/tmp/local/include -O3 -fno-strict-overflow -fstack-protector-all -fPIC" \
CXXFLAGS="-I/tmp/local/include -O2 -fPIC" \
LDFLAGS="-L/tmp/local/lib -pipe -Wl,-z,relro,-z,now -static" \
CC=clang
# Build toolchain.
RUN apk add --no-cache \
clang \
glib-dev glib-static \
coreutils \
autoconf \
automake \
build-base \
cmake \
git \
libtool \
nasm \
pkgconfig \
yasm \
wget \
curl \
ninja-build \
meson \
cargo cargo-c \
diffutils \
bash
# Distro-provided static libraries linked into ffmpeg.
RUN apk add --no-cache \
zlib-dev zlib-static \
bzip2-dev bzip2-static \
expat-dev expat-static \
libxml2-dev libxml2-static \
fontconfig-dev fontconfig-static \
freetype freetype-dev freetype-static \
fribidi-dev fribidi-static \
harfbuzz-dev harfbuzz-static \
graphite2-static \
numactl-dev \
brotli-dev brotli-static \
soxr-dev soxr-static \
libjpeg-turbo libjpeg-turbo-dev \
libpng-dev libpng-static \
xvidcore-dev xvidcore-static \
libsodium-dev libsodium-static \
zeromq-dev libzmq-static \
openssl-dev openssl-libs-static
WORKDIR /tmp
# libass: subtitle rendering.
RUN git clone --depth 1 "https://github.com/libass/libass.git" && cd libass && \
./autogen.sh && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
make -j $(nproc) && \
make install
# fdk-aac: AAC encoder (makes the build non-redistributable, see docs).
RUN git clone --depth 1 "https://github.com/mstorsjo/fdk-aac" && cd fdk-aac && \
./autogen.sh && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
make -j $(nproc) && \
make install
# LAME: MP3 encoder.
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "lame-3.100.tar.gz" "https://downloads.sourceforge.net/project/lame/lame/3.100/lame-3.100.tar.gz" && \
tar xf "lame-3.100.tar.gz" && \
cd "lame-3.100" && \
./configure --prefix="$LOCALDESTDIR" --enable-expopt=full --enable-shared=no && \
make -j $(nproc) && \
make install
# Opus audio codec.
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "opus-1.4.tar.gz" "https://ftp.osuosl.org/pub/xiph/releases/opus/opus-1.4.tar.gz" && \
tar xf "opus-1.4.tar.gz" && \
cd "opus-1.4" && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no --enable-static --disable-doc && \
make -j $(nproc) && \
make install
# SRT: secure reliable transport protocol.
RUN git clone --depth 1 "https://github.com/Haivision/srt.git" && cd srt && \
mkdir build && \
cd build && \
cmake .. -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DOPENSSL_USE_STATIC_LIBS=ON -DUSE_STATIC_LIBSTDCXX:BOOLEAN=ON -DENABLE_CXX11:BOOLEAN=ON -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" && \
make -j $(nproc) && \
make install
# libvpx: VP8/VP9.
RUN git clone "https://github.com/webmproject/libvpx.git" && cd libvpx && \
./configure --prefix="$LOCALDESTDIR" --disable-shared --enable-static --disable-unit-tests --disable-docs --enable-postproc --enable-vp9-postproc --enable-runtime-cpu-detect && \
make -j $(nproc) && \
make install
# x264: H.264 encoder.
RUN git clone "https://code.videolan.org/videolan/x264" && cd x264 && \
./configure --prefix="$LOCALDESTDIR" --enable-static && \
make -j $(nproc) && \
make install
# x265: H.265/HEVC encoder.
RUN git clone "https://bitbucket.org/multicoreware/x265_git.git" && cd x265_git/build && \
cmake ../source -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DCMAKE_CXX_FLAGS_RELEASE:STRING="-O3 -DNDEBUG $CXXFLAGS" && \
make -j $(nproc) && \
make install
# rav1e: Rust AV1 encoder, installed via cargo-c as a static C library.
RUN git clone "https://github.com/xiph/rav1e.git" && cd rav1e && \
RUSTFLAGS="-C target-feature=+crt-static" cargo cinstall --release --jobs $(nproc) --prefix=$LOCALDESTDIR --libdir=$LOCALDESTDIR/lib --includedir=$LOCALDESTDIR/include
# SVT-AV1: fast AV1 encoder.
RUN git clone --depth 1 "https://gitlab.com/AOMediaCodec/SVT-AV1.git" && cd SVT-AV1/Build && \
cmake .. -G"Unix Makefiles" -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" && \
make -j $(nproc) && \
make install
# dav1d: AV1 decoder.
RUN git clone --depth 1 "https://code.videolan.org/videolan/dav1d.git" && cd dav1d && \
mkdir build && cd build && \
meson setup -Denable_tools=false -Denable_tests=false --default-library=static .. --prefix "$LOCALDESTDIR" --libdir="$LOCALDESTDIR/lib" && \
ninja && \
ninja install
# ffmpeg itself. The sed switches the executable to static-PIE linking.
# NOTE(review): "-DZMG_STATIC" looks like a typo for "-DZMQ_STATIC"
# (static libzmq) — confirm before changing.
RUN git clone --depth 1 https://git.ffmpeg.org/ffmpeg.git && cd ffmpeg && \
sed -i 's/add_ldexeflags -fPIE -pie/add_ldexeflags -fPIE -static-pie/' configure && \
./configure \
--pkg-config-flags=--static \
--extra-cflags="-fopenmp -DZMG_STATIC" \
--extra-ldflags="-fopenmp -Wl,--copy-dt-needed-entries -Wl,--allow-multiple-definition" \
--enable-runtime-cpudetect \
--prefix=/usr/local \
--disable-debug \
--disable-doc \
--disable-ffplay \
--disable-shared \
--enable-gpl \
--enable-version3 \
--enable-nonfree \
--enable-small \
--enable-static \
--enable-libass \
--enable-fontconfig \
--enable-libfdk-aac \
--enable-libfribidi \
--enable-libfreetype \
--enable-libharfbuzz \
--enable-libmp3lame \
--enable-libopus \
--enable-libsoxr \
--enable-libsrt \
--enable-libvpx \
--enable-libx264 \
--enable-libx265 \
--enable-libzmq \
--enable-nonfree \
--enable-openssl \
--enable-libsvtav1 \
--enable-librav1e \
--enable-libdav1d \
--enable-libxvid && \
make -j $(nproc) && \
make install
# Strip debug symbols from the final binaries.
RUN strip /usr/local/bin/ffmpeg /usr/local/bin/ffprobe

View File

@ -1,40 +0,0 @@
FROM alpine:latest
ARG FFPLAYOUT_VERSION=0.24.0-rc3
ARG SHARED_STORAGE=false
ENV DB=/db
ENV SHARED_STORAGE=${SHARED_STORAGE}
COPY --from=ffmpeg-build /usr/local/bin/ffmpeg /usr/local/bin/ffmpeg
COPY --from=ffmpeg-build /usr/local/bin/ffprobe /usr/local/bin/ffprobe
COPY README.md ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.* /tmp/
COPY <<-EOT /run.sh
#!/bin/sh
if [ ! -f /db/ffplayout.db ]; then
ffplayout -i -u admin -p admin -m contact@example.com --storage "/tv-media" --playlists "/playlists" --public "/public" --logs "/logging" --mail-smtp "mail.example.org" --mail-user "admin@example.org" --mail-password "" --mail-starttls
fi
/usr/bin/ffplayout -l "0.0.0.0:8787"
EOT
RUN apk update && \
apk upgrade && \
apk add --no-cache sqlite font-dejavu && \
chmod +x /run.sh
RUN [[ -f "/tmp/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" ]] || \
wget -q "https://github.com/ffplayout/ffplayout/releases/download/v${FFPLAYOUT_VERSION}/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" -P /tmp/ && \
cd /tmp && \
tar xf "ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" && \
cp ffplayout /usr/bin/ && \
mkdir -p /usr/share/ffplayout/ && \
cp assets/dummy.vtt assets/logo.png assets/DejaVuSans.ttf assets/FONT_LICENSE.txt /usr/share/ffplayout/ && \
rm -rf /tmp/* && \
mkdir ${DB}
EXPOSE 8787
CMD ["/run.sh"]

View File

@ -1,227 +0,0 @@
FROM nvidia/cuda:12.5.0-runtime-rockylinux9
ARG FFPLAYOUT_VERSION=0.24.0-rc3
ARG SHARED_STORAGE=false
ENV DB=/db
ENV SHARED_STORAGE=${SHARED_STORAGE}
ENV EXTRA_CFLAGS=-march=generic \
LOCALBUILDDIR=/tmp/build \
LOCALDESTDIR=/tmp/local \
PKG_CONFIG="pkg-config --static" \
PKG_CONFIG_PATH="/usr/lib64/pkgconfig/:/tmp/local/lib/pkgconfig" \
CPPFLAGS="-I/tmp/local/include -O3 -fno-strict-overflow -fstack-protector-all -fPIC" \
CFLAGS="-I/tmp/local/include -O3 -fno-strict-overflow -fstack-protector-all -fPIC" \
CXXFLAGS="-I/tmp/local/include -O2 -fPIC" \
LDFLAGS="-L/tmp/local/lib -pipe -Wl,-z,relro,-z,now -static" \
CC=clang
RUN dnf clean all -y && \
dnf makecache --refresh && \
dnf install -y epel-release && \
dnf config-manager --set-enabled crb
RUN dnf install -y which sqlite libstdc++-static libtool autoconf clang \
cmake ninja-build cargo ragel meson git pkgconfig bzip2 \
python3-devel gperf perl glibc-static binutils-devel \
nasm rsync wget
WORKDIR /tmp
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "zlib-1.3.1.tar.gz" "https://zlib.net/zlib-1.3.1.tar.gz" && \
tar xf "zlib-1.3.1.tar.gz" && \
cd "zlib-1.3.1" && \
./configure --prefix="$LOCALDESTDIR" --static && \
make -j $(nproc) && \
make install
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "openssl-1.1.1u.tar.gz" "https://www.openssl.org/source/openssl-1.1.1u.tar.gz" && \
tar xf "openssl-1.1.1u.tar.gz" && \
cd "openssl-1.1.1u" && \
./Configure --prefix=$LOCALDESTDIR --openssldir=$LOCALDESTDIR linux-x86_64 --libdir="$LOCALDESTDIR/lib" no-shared enable-camellia enable-idea enable-mdc2 enable-rfc3779 -static-libstdc++ -static-libgcc && \
make depend all && \
make install_sw
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "bzip2-1.0.8.tar.gz" "https://sourceware.org/pub/bzip2/bzip2-1.0.8.tar.gz" && \
tar xf "bzip2-1.0.8.tar.gz" && \
cd "bzip2-1.0.8" && \
make install PREFIX="$LOCALDESTDIR"
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "libpng-1.6.40.tar.gz" "http://prdownloads.sourceforge.net/libpng/libpng-1.6.40.tar.gz" && \
tar xf "libpng-1.6.40.tar.gz" && \
cd "libpng-1.6.40" && \
./configure --prefix="$LOCALDESTDIR" --disable-shared && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://github.com/fribidi/fribidi.git" && cd fribidi && \
./autogen.sh && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
make -j $(nproc) 2>/dev/null || true && \
make install
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "expat-2.5.0.tar.bz2" "https://github.com/libexpat/libexpat/releases/download/R_2_5_0/expat-2.5.0.tar.bz2" && \
tar xf "expat-2.5.0.tar.bz2" && \
cd "expat-2.5.0" && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no --without-docbook && \
make -j $(nproc) && \
make install
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "freetype-2.13.1.tar.gz" "https://sourceforge.net/projects/freetype/files/freetype2/2.13.1/freetype-2.13.1.tar.gz" && \
tar xf "freetype-2.13.1.tar.gz" && \
cd "freetype-2.13.1" && \
./configure --prefix="$LOCALDESTDIR" --disable-shared --with-harfbuzz=no && \
make -j $(nproc) && \
make install
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "fontconfig-2.14.2.tar.gz" "https://www.freedesktop.org/software/fontconfig/release/fontconfig-2.14.2.tar.gz" && \
tar xf "fontconfig-2.14.2.tar.gz" && \
cd "fontconfig-2.14.2" && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
make -j $(nproc) && \
make install && \
cp fontconfig.pc "$LOCALDESTDIR/lib/pkgconfig/"
RUN git clone --depth 1 "https://github.com/harfbuzz/harfbuzz.git" && cd harfbuzz && \
mkdir build && cd build && \
meson setup -Denable_tools=false --default-library=static .. --prefix "$LOCALDESTDIR" --libdir="$LOCALDESTDIR/lib" && \
ninja && \
ninja install
RUN git clone --depth 1 "https://github.com/zeromq/libzmq.git" && cd libzmq && \
./autogen.sh && \
./configure --prefix="$LOCALDESTDIR" --enable-static --disable-shared && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://github.com/libass/libass.git" && cd libass && \
./autogen.sh && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no --disable-harfbuzz && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://github.com/mstorsjo/fdk-aac" && cd fdk-aac && \
./autogen.sh && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
make -j $(nproc) && \
make install
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "lame-3.100.tar.gz" "https://downloads.sourceforge.net/project/lame/lame/3.100/lame-3.100.tar.gz" && \
tar xf "lame-3.100.tar.gz" && \
cd "lame-3.100" && \
./configure --prefix="$LOCALDESTDIR" --enable-expopt=full --enable-shared=no && \
make -j $(nproc) && \
make install
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "opus-1.4.tar.gz" "https://ftp.osuosl.org/pub/xiph/releases/opus/opus-1.4.tar.gz" && \
tar xf "opus-1.4.tar.gz" && \
cd "opus-1.4" && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no --enable-static --disable-doc && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://github.com/Haivision/srt.git" && cd srt && \
mkdir build && \
cd build && \
cmake .. -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DOPENSSL_USE_STATIC_LIBS=ON -DUSE_STATIC_LIBSTDCXX:BOOLEAN=ON -DENABLE_CXX11:BOOLEAN=ON -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" -DENABLE_APPS=0 -DENABLE_EXAMPLES=0 && \
make -j $(nproc) && \
make install
RUN git clone "https://github.com/webmproject/libvpx.git" && cd libvpx && \
./configure --prefix="$LOCALDESTDIR" --as=nasm --disable-shared --enable-static --disable-unit-tests --disable-docs --enable-postproc --enable-vp9-postproc --enable-runtime-cpu-detect && \
make -j $(nproc) && \
make install
RUN git clone "https://code.videolan.org/videolan/x264" && cd x264 && \
./configure --prefix="$LOCALDESTDIR" --enable-static && \
make -j $(nproc) && \
make install
RUN git clone "https://bitbucket.org/multicoreware/x265_git.git" && cd x265_git/build && \
cmake ../source -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DCMAKE_CXX_FLAGS_RELEASE:STRING="-O3 -DNDEBUG $CXXFLAGS" && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://gitlab.com/AOMediaCodec/SVT-AV1.git" && cd SVT-AV1/Build && \
cmake .. -G"Unix Makefiles" -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://code.videolan.org/videolan/dav1d.git" && cd dav1d && \
mkdir build && cd build && \
meson setup -Denable_tools=false -Denable_tests=false --default-library=static .. --prefix "$LOCALDESTDIR" --libdir="$LOCALDESTDIR/lib" && \
ninja && \
ninja install
RUN git clone --depth 1 https://git.videolan.org/git/ffmpeg/nv-codec-headers && cd nv-codec-headers && \
make install PREFIX="$LOCALDESTDIR"
# Build a static ffmpeg with NVENC hardware encoding support (nv-codec-headers
# installed in the previous step). ZMQ_STATIC tells zmq.h to expect static linkage.
RUN git clone --depth 1 https://git.ffmpeg.org/ffmpeg.git && cd ffmpeg && \
./configure \
--pkg-config-flags=--static \
--extra-cflags="-fopenmp -DZMQ_STATIC" \
--extra-ldflags="-fopenmp -Wl,--copy-dt-needed-entries -Wl,--allow-multiple-definition" \
--enable-runtime-cpudetect \
--prefix=/usr/local \
--disable-debug \
--disable-doc \
--disable-ffplay \
--disable-shared \
--enable-gpl \
--enable-version3 \
--enable-nonfree \
--enable-small \
--enable-static \
--enable-libass \
--enable-fontconfig \
--enable-libfdk-aac \
--enable-libfribidi \
--enable-libfreetype \
--enable-libharfbuzz \
--enable-libmp3lame \
--enable-libopus \
--enable-libsrt \
--enable-libvpx \
--enable-libx264 \
--enable-libx265 \
--enable-libzmq \
--enable-openssl \
--enable-libsvtav1 \
--enable-libdav1d \
--enable-nvenc && \
make -j $(nproc) && \
make install
RUN strip /usr/local/bin/ffmpeg /usr/local/bin/ffprobe
WORKDIR /
COPY README.md ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.* /tmp/
COPY <<-EOT /run.sh
#!/bin/sh
if [ ! -f /db/ffplayout.db ]; then
ffplayout -i -u admin -p admin -m contact@example.com --storage "/tv-media" --playlists "/playlists" --public "/public" --logs "/logging" --mail-smtp "mail.example.org" --mail-user "admin@example.org" --mail-password "" --mail-starttls
fi
/usr/bin/ffplayout -l "0.0.0.0:8787"
EOT
RUN chmod +x /run.sh
# Download the release tarball unless one was already provided in the build
# context, then install the binary and its bundled assets.
# Fix: "mmkdir" was a typo for "mkdir" and would abort the image build.
RUN [[ -f "/tmp/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" ]] || \
wget -q "https://github.com/ffplayout/ffplayout/releases/download/v${FFPLAYOUT_VERSION}/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" -P /tmp/ && \
cd /tmp && \
tar xf "ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" && \
cp ffplayout /usr/bin/ && \
mkdir -p /usr/share/ffplayout/ && \
cp assets/dummy.vtt assets/logo.png assets/DejaVuSans.ttf assets/FONT_LICENSE.txt /usr/share/ffplayout/ && \
rm -rf /tmp/* && \
mkdir ${DB}
EXPOSE 8787
CMD ["/run.sh"]

211
docs/CONFIG.md Normal file
View File

@ -0,0 +1,211 @@
The configuration file **ffplayout.yml** has this sections:
---
```YAML
general:
stop_threshold: 11
```
Sometimes it can happen that a file is corrupt but still playable;
this can produce a streaming error that affects all following files. The only
remedy in this case is to stop ffplayout and start it again. Here we only say when
it stops; the starting process is in your hands. The best way is a **systemd service**
on Linux. `stop_threshold` stops ffplayout if it is out of sync by more than this
value. A number below 3 can cause unexpected errors.
---
```YAML
mail:
subject: "Playout Error"
smpt_server: "mail.example.org"
smpt_port: 587
sender_addr: "ffplayout@example.org"
sender_pass: "12345"
recipient:
mail_level: "ERROR"
```
Send error messages to email address, like:
- missing playlist
- invalid json format
- missing clip path
leave recipient blank, if you don't need this.
`mail_level` can be: **WARNING, ERROR**
---
```YAML
logging:
log_to_file: True
backup_count: 7
log_path: "/var/log/ffplayout/"
log_level: "DEBUG"
ffmpeg_level: "ERROR"
```
Logging to file, if `log_to_file = False` > log to console.
`backup_count` says how long log files will be saved in days.
Use a path under **/var/log/** only if you run this program as a *daemon*.
`log_level` can be: **DEBUG, INFO, WARNING, ERROR**
`ffmpeg_level` can be: **INFO, WARNING, ERROR**
---
```YAML
processing:
width: 1024
height: 576
aspect: 1.778
fps: 25
add_logo: True
logo: "docs/logo.png"
logo_scale: "100:-1"
logo_opacity: 0.7
logo_filter: "overlay=W-w-12:12"
add_loudnorm: False
loud_I: -18
loud_TP: -1.5
loud_LRA: 11
output_count: 1
```
ffmpeg pre-compression settings, all clips get prepared in that way,
so the input for the final compression is unique.
- `aspect` must be a float number.
- with `logo_scale = 100:-1` the logo can be scaled
- with `logo_opacity` the logo can be made transparent
- with `logo_filter = overlay=W-w-12:12` you can modify the logo position
- with `use_loudnorm` you can activate single pass EBU R128 loudness normalization
- `loud_*` can adjust the loudnorm filter
- `output_count` sets the number of outputs for the filtering; > 1 gives the option to use the same filters for multiple outputs. These outputs can be referenced in 'stream_param'; the names will be vout2, vout3;
aout2, aout3 etc.
**INFO:** output is progressive!
---
```YAML
ingest:
stream_input: >-
-f live_flv
-listen 1
-i rtmp://localhost:1936/live/stream
```
**ingest** works only in combination with output -> mode = **live_switch**!
It runs a server for an ingest stream. This stream will override the normal streaming
until it is done.
There is no authentication; this is up to you. The recommended way is to set the address to localhost, stream to a local server with authentication, and from there stream to this app.
---
```YAML
play:
mode: playlist
```
Set playing mode, like **playlist**; **folder**, or your own custom one.
---
```YAML
playlist:
path: "/playlists"
day_start: "5:59:25"
length: "24:00:00"
```
Put only the root path here, for example: **"/playlists"**.
Subfolders are read by the script and need this structure:
- **"/playlists/2018/01"** (/playlists/year/month)
`day_start` means at which time the playlist should start. Leave `day_start` blank when the playlist should always start at the beginning.
`length` represents the target length of the playlist; when it is blank, the real length will not be considered.
---
```YAML
storage:
path: "/mediaStorage"
filler_clip: "/mediaStorage/filler/filler.mp4"
extensions:
- ".mp4"
- ".mkv"
shuffle: True
```
Play files from the path in order or randomly. `filler_clip` is for filling the end
to reach 24 hours; it will loop when necessary. `extensions:` searches only for files
with these extensions; add as many as you want. Set `shuffle` to **True** to pick files randomly.
---
```YAML
text:
add_text: True
over_pre: False
bind_address: "tcp://127.0.0.1:5555"
fontfile: "/usr/share/fonts/truetype/dejavu/DejaVuSans.ttf"
text_from_filename: False
style: "x=(w-tw)/2:y=(h-line_h)*0.9:fontsize=24:fontcolor=#ffffff:box=1:boxcolor=#000000:boxborderw=4"
regex: "^(.*)_"
```
Overlay text in combination with [messenger](https://github.com/ffplayout/messenger) or the web [frontend](https://github.com/ffplayout/ffplayout-frontend).
On windows `fontfile` path need to be like this: **C\:/WINDOWS/fonts/DejaVuSans.ttf**.
In a standard environment the filter drawtext node is: **Parsed_drawtext_2**.
`over_pre`: if True, text will be overlaid in pre-processing. Continuing the same text
over multiple files is not possible in that mode.
`text_from_filename` activates the extraction of text from a filename. With `style` you can define the drawtext parameters like position, color, etc. Text posted over the API will override this.
With `regex` you can format file names, to get a title from it.
---
```YAML
out:
mode: 'stream'
preview: False
preview_param: >-
-s 512x288
-c:v libx264
-crf 24
-x264-params keyint=50:min-keyint=25:scenecut=-1
-maxrate 800k
-bufsize 1600k
-preset ultrafast
-profile:v Main
-level 3.1
-c:a aac
-ar 44100
-b:a 128k
-flags +global_header
-f flv rtmp://preview.local/live/stream
stream_param: >-
-c:v libx264
-crf 23
-x264-params keyint=50:min-keyint=25:scenecut=-1
-maxrate 1300k
-bufsize 2600k
-preset medium
-profile:v Main
-level 3.1
-c:a aac
-ar 44100
-b:a 128k
-flags +global_header
-f flv rtmp://localhost/live/stream
```
The final ffmpeg post-compression; set the settings to your needs!
`mode` has the standard options **desktop**, **hls**, **live_switch**, **stream**. Self-made outputs
can be defined by adding a script in the output folder with an **output()** function inside.
'preview' works only in streaming output and creates a separate preview stream.
For output mode hls, output can look like:
```YAML
[...]
-flags +cgop
-f hls
-hls_time 6
-hls_list_size 600
-hls_flags append_list+delete_segments+omit_endlist+program_date_time
-hls_segment_filename /var/www/srs/live/stream-%09d.ts /var/www/srs/live/stream.m3u8
```

55
docs/INSTALL.md Normal file
View File

@ -0,0 +1,55 @@
**ffplayout_engine Installation**
================
Here is a description of how to install *ffplayout engine* on a standard Linux server.
Requirements
-----
- python version 3.7+
- **ffmpeg v4.2+** and **ffprobe**
Installation
-----
- install **ffmpeg**, **ffprobe** (and **ffplay** if you need the preview mode)
- clone repo to **/opt/**: `git clone https://github.com/ffplayout/ffplayout_engine.git`
- `cd /opt/ffplayout_engine`
- create virtual environment: `virtualenv -p python3 venv`
- run `source ./venv/bin/activate`
- install dependencies: `pip3 install -r requirements.txt`
- create logging folder: **/var/log/ffplayout**
- create playlists folder, in that format: **/playlists/year/month**
- create folder for media storage: **/tv-media**
- set variables in config file to your needs
Single Channel Setup
-----
**systemd** is required
- copy **docs/ffplayout_engine.service** to **/etc/systemd/system/**
- copy **ffplayout.yml** to **/etc/ffplayout/**
- change user and group in service file (for example to **www-data**)
- activate service: `sudo systemctl enable ffplayout_engine`
- edit **/etc/ffplayout/ffplayout.yml**
- when playlists exist, run the service: `sudo systemctl start ffplayout_engine`
Multi Channel Setup
-----
- copy **docs/ffplayout_engine-multichannel.service** to **/etc/systemd/system/**
- change user and group in service file (for example to **www-data**)
- copy **ffplayout.yml** to **/etc/ffplayout/ffplayout-001.yml**
- copy **docs/supervisor** folder to **/etc/ffplayout/**
- every channel needs its own engine config **ffplayout-002.yml**, **ffplayout-003.yml**, etc.
- every channel needs also its own service file under **/etc/ffplayout/supervisor/config.d**
- create for every channel a subfolder for logging: **/var/log/ffplayout/channel-001**, **/var/log/ffplayout/channel-002**, etc.
- edit **/etc/ffplayout/ffplayout-00*.yml**
- when you want to use the web frontend, create only the first channel and the other ones in the frontend
- activate service: `sudo systemctl enable ffplayout_engine-multichannel`
- when playlists exist, run the service: `sudo systemctl start ffplayout_engine-multichannel`
Using it Without Installation
-----
Of course you can just run it too. Install only the dependencies from **requirements.txt** and run it with **python ffplayout.py [parameters]**.

View File

@ -1,52 +0,0 @@
**ffplayout-engine Documentation**
================
### **[For Developer](/docs/developer.md)**
Learn how to setup a developer environment and to cross compile for different platforms.
### **[Install](/docs/install.md)**
How to install ffplayout with API and frontend.
### **[Folder Mode](/docs/folder_mode.md)**
Learn more about playing the content of a folder.
### **[Live Ingest](/docs/live_ingest.md)**
Using live ingest to inject a live stream.
### **[Output Modes](/docs/output.md)**
The different output modes.
### **[Playlist Generation](/docs/playlist_gen.md)**
Generate playlists based on template.
### **[Multi Audio Tracks](/docs/multi_audio.md)**
Output multiple audio tracks.
### **[Custom Filter](/docs/custom_filters.md)**
Apply self defined audio/video filters.
### **[Preview Stream](/docs/preview_stream.md)**
Setup and use a preview stream.
### **[Remote Sources](/docs/remote_source.md)**
Use of remote sources, like https://example.org/video.mp4
### **[ffplayout API](/docs/api.md)**
Control the engine, playlist and config with a ~REST API
### **[Stream Copy](/docs/stream_copy.md)**
Copy audio and or video stream
### **[Advanced Settings](/docs/advanced_settings.md)**

View File

@ -1,94 +0,0 @@
## Advanced settings
With **advanced settings** you can control all ffmpeg inputs/decoder/output and filters.
> **_Note:_** Changing these settings is for advanced users only! There will be no support or guarantee that it will work and be stable after changing them!
For changing this settings you need to have knowledge about hardware encoding with ffmpeg. Good starting points are:
- [HWAccelIntro](https://trac.ffmpeg.org/wiki/HWAccelIntro)
- [VAAPI](https://trac.ffmpeg.org/wiki/Hardware/VAAPI)
- [QuickSync](https://trac.ffmpeg.org/wiki/Hardware/QuickSync)
### Example config
##### Here an example with Intel QuickSync:
```YAML
help: Changing these settings is for advanced users only! There will be no support or guarantee that ffplayout will be stable after changing them.
decoder:
input_param: -hwaccel qsv -init_hw_device qsv=hw -filter_hw_device hw -hwaccel_output_format qsv
# output_param get also applied to ingest instance.
output_param: -c:v mpeg2_qsv -g 1 -b:v 50000k -minrate 50000k -maxrate 50000k -bufsize 25000k -c:a s302m -strict -2 -sample_fmt s16 -ar 48000 -ac 2
filters:
deinterlace: deinterlace_qsv
pad_scale_w: scale_qsv={}:-1
pad_scale_h: scale_qsv=-1:{}
pad_video: 'null' # pad=max(iw\\,ih*({0}/{1})):ow/({0}/{1}):(ow-iw)/2:(oh-ih)/2
fps: vpp_qsv=framerate=25
scale: scale_qsv={}:{}
set_dar: 'null' # setdar=dar={}
fade_in: 'null' # fade=in:st=0:d=0.5
fade_out: 'null' # fade=out:st={}:d=1.0
overlay_logo_scale: 'null'
overlay_logo_fade_in: fade=in:st=0:d=1.0 # fade=in:st=0:d=1.0:alpha=1
overlay_logo_fade_out: fade=out:st={}:d=1.0 # fade=out:st={}:d=1.0:alpha=1
overlay_logo: hwupload=extra_hw_frames=64,format=qsv[l];[v][l]overlay_qsv={}:shortest=1
tpad: 'null' # tpad=stop_mode=add:stop_duration={}
drawtext_from_file: hwdownload,format=nv12,drawtext=text='{}':{}{} # drawtext=text='{}':{}{}
drawtext_from_zmq: hwdownload,format=nv12,zmq=b=tcp\\://'{}',drawtext@dyntext={} # zmq=b=tcp\\\\://'{}',drawtext@dyntext={}
aevalsrc: # aevalsrc=0:channel_layout=stereo:duration={}:sample_rate=48000
afade_in: # afade=in:st=0:d=0.5
afade_out: # afade=out:st={}:d=1.0
apad: # apad=whole_dur={}
volume: # volume={}
split: # split={}{}
encoder:
# use `-hwaccel vulkan` when output mode is desktop
input_param: -hwaccel qsv -init_hw_device qsv=hw -filter_hw_device hw -hwaccel_output_format qsv
ingest:
input_param: -hwaccel qsv -init_hw_device qsv=hw -filter_hw_device hw -hwaccel_output_format qsv
```
##### Here an example with Nvidia HW processing
```YAML
help: Changing these settings is for advanced users only! There will be no support or guarantee that it will be stable after changing them.
decoder:
input_param: -thread_queue_size 1024 -hwaccel_device 0 -hwaccel cuvid -hwaccel_output_format cuda
# output_param get also applied to ingest instance.
output_param: -c:v h264_nvenc -preset p2 -tune ll -b:v 50000k -minrate 50000k -maxrate 50000k -bufsize 25000k -c:a s302m -strict -2 -sample_fmt s16 -ar 48000 -ac 2
filters:
deinterlace: 'null'
pad_scale_w: 'null' # scale={}:-1
pad_scale_h: 'null' # scale=-1:{}
pad_video: 'null' # pad=max(iw\\,ih*({0}/{1})):ow/({0}/{1}):(ow-iw)/2:(oh-ih)/2
fps: 'null' # fps={}
scale: scale_cuda={}:{}:interp_algo=lanczos:force_original_aspect_ratio=decrease # scale={}:{}
set_dar: 'null' # setdar=dar={}
fade_in: hwdownload,format=nv12,fade=in:st=0:d=0.5,format=nv12,hwupload_cuda # fade=in:st=0:d=0.5
fade_out: hwdownload,format=nv12,fade=out:st={}:d=1.0,format=nv12,hwupload_cuda # fade=out:st={}:d=1.0
overlay_logo_scale: 'null' # scale={}
overlay_logo_fade_in: fade=in:st=0:d=1.0 # fade=in:st=0:d=1.0:alpha=1
overlay_logo_fade_out: fade=out:st={}:d=1.0 # fade=out:st={}:d=1.0:alpha=1
overlay_logo: format=nv12,hwupload_cuda[l];[v][l]overlay_cuda=W-w-12:12:shortest=1,hwdownload,format=nv12
tpad: # tpad=stop_mode=add:stop_duration={}
drawtext_from_file: # drawtext=text='{}':{}{}
drawtext_from_zmq: # zmq=b=tcp\\\\://'{}',drawtext@dyntext={}
aevalsrc: # aevalsrc=0:channel_layout=stereo:duration={}:sample_rate=48000
afade_in: # afade=in:st=0:d=0.5
afade_out: # afade=out:st={}:d=1.0
apad: # apad=whole_dur={}
volume: # volume={}
split: # split={}{}
encoder:
input_param:
ingest:
input_param: -thread_queue_size 1024 -hwaccel_device 0 -hwaccel cuvid -hwaccel_output_format cuda
```
---
**At the moment this function is _experimental_, if you think you found a bug: check full decoder/encoder/ingest command with ffmpeg in terminal. When there the command works you can open a bug report issue.**
Please don't open issues for general command line helps!

View File

@ -1,401 +0,0 @@
### Possible endpoints
Run the API thru the systemd service, or like:
```BASH
ffplayout -l 127.0.0.1:8787
```
For all endpoints an (Bearer) authentication is required.\
`{id}` represent the channel id, and at default is 1.
#### User Handling
**Login**
```BASH
curl -X POST http://127.0.0.1:8787/auth/login/ -H "Content-Type: application/json" \
-d '{ "username": "<USER>", "password": "<PASS>" }'
```
**Response:**
```JSON
{
"id": 1,
"mail": "user@example.org",
"username": "<USER>",
"token": "<TOKEN>"
}
```
From here on all request **must** contain the authorization header:\
`"Authorization: Bearer <TOKEN>"`
**Get current User**
```BASH
curl -X GET 'http://127.0.0.1:8787/api/user' -H 'Content-Type: application/json' \
-H 'Authorization: Bearer <TOKEN>'
```
**Get User by ID**
```BASH
curl -X GET 'http://127.0.0.1:8787/api/user/2' -H 'Content-Type: application/json' \
-H 'Authorization: Bearer <TOKEN>'
```
```BASH
curl -X GET 'http://127.0.0.1:8787/api/users' -H 'Content-Type: application/json' \
-H 'Authorization: Bearer <TOKEN>'
```
**Update current User**
```BASH
curl -X PUT http://127.0.0.1:8787/api/user/1 -H 'Content-Type: application/json' \
-d '{"mail": "<MAIL>", "password": "<PASS>"}' -H 'Authorization: Bearer <TOKEN>'
```
**Add User**
```BASH
curl -X POST 'http://127.0.0.1:8787/api/user/' -H 'Content-Type: application/json' \
-d '{"mail": "<MAIL>", "username": "<USER>", "password": "<PASS>", "role_id": 1, "channel_id": 1}' \
-H 'Authorization: Bearer <TOKEN>'
```
```BASH
curl -X GET 'http://127.0.0.1:8787/api/user/2' -H 'Content-Type: application/json' \
-H 'Authorization: Bearer <TOKEN>'
```
#### Settings
**Get Settings from Channel**
```BASH
curl -X GET http://127.0.0.1:8787/api/channel/1 -H "Authorization: Bearer <TOKEN>"
```
**Response:**
```JSON
{
"id": 1,
"name": "Channel 1",
"preview_url": "http://localhost/live/preview.m3u8",
"extra_extensions": "jpg,jpeg,png",
"utc_offset": "+120"
}
```
**Get settings from all Channels**
```BASH
curl -X GET http://127.0.0.1:8787/api/channels -H "Authorization: Bearer <TOKEN>"
```
**Update Channel**
```BASH
curl -X PATCH http://127.0.0.1:8787/api/channel/1 -H "Content-Type: application/json" \
-d '{ "id": 1, "name": "Channel 1", "preview_url": "http://localhost/live/stream.m3u8", "extra_extensions": "jpg,jpeg,png"}' \
-H "Authorization: Bearer <TOKEN>"
```
**Create new Channel**
```BASH
curl -X POST http://127.0.0.1:8787/api/channel/ -H "Content-Type: application/json" \
-d '{ "name": "Channel 2", "preview_url": "http://localhost/live/channel2.m3u8", "extra_extensions": "jpg,jpeg,png" }' \
-H "Authorization: Bearer <TOKEN>"
```
**Delete Channel**
```BASH
curl -X DELETE http://127.0.0.1:8787/api/channel/2 -H "Authorization: Bearer <TOKEN>"
```
#### ffplayout Config
**Get Advanced Config**
```BASH
curl -X GET http://127.0.0.1:8787/api/playout/advanced/1 -H 'Authorization: Bearer <TOKEN>'
```
Response is a JSON object
**Update Advanced Config**
```BASH
curl -X PUT http://127.0.0.1:8787/api/playout/advanced/1 -H "Content-Type: application/json" \
-d { <CONFIG DATA> } -H 'Authorization: Bearer <TOKEN>'
```
**Get Config**
```BASH
curl -X GET http://127.0.0.1:8787/api/playout/config/1 -H 'Authorization: Bearer <TOKEN>'
```
Response is a JSON object
**Update Config**
```BASH
curl -X PUT http://127.0.0.1:8787/api/playout/config/1 -H "Content-Type: application/json" \
-d { <CONFIG DATA> } -H 'Authorization: Bearer <TOKEN>'
```
#### Text Presets
Text presets are made for sending text messages to the ffplayout engine, to overlay them as a lower third.
**Get all Presets**
```BASH
curl -X GET http://127.0.0.1:8787/api/presets/ -H 'Content-Type: application/json' \
-H 'Authorization: Bearer <TOKEN>'
```
**Update Preset**
```BASH
curl -X PUT http://127.0.0.1:8787/api/presets/1 -H 'Content-Type: application/json' \
-d '{ "name": "<PRESET NAME>", "text": "<TEXT>", "x": "<X>", "y": "<Y>", "fontsize": 24, "line_spacing": 4, "fontcolor": "#ffffff", "box": 1, "boxcolor": "#000000", "boxborderw": 4, "alpha": 1.0, "channel_id": 1 }' \
-H 'Authorization: Bearer <TOKEN>'
```
**Add new Preset**
```BASH
curl -X POST http://127.0.0.1:8787/api/presets/1/ -H 'Content-Type: application/json' \
-d '{ "name": "<PRESET NAME>", "text": "TEXT>", "x": "<X>", "y": "<Y>", "fontsize": 24, "line_spacing": 4, "fontcolor": "#ffffff", "box": 1, "boxcolor": "#000000", "boxborderw": 4, "alpha": 1.0, "channel_id": 1 }' \
-H 'Authorization: Bearer <TOKEN>'
```
**Delete Preset**
```BASH
curl -X DELETE http://127.0.0.1:8787/api/presets/1 -H 'Content-Type: application/json' \
-H 'Authorization: Bearer <TOKEN>'
```
### ffplayout controlling
here we communicate with the engine for:
- jump to last or next clip
- reset playlist state
- get infos about current, next, last clip
- send text to the engine, for overlaying it (as lower third etc.)
**Send Text to ffplayout**
```BASH
curl -X POST http://127.0.0.1:8787/api/control/1/text/ \
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>' \
-d '{"text": "Hello from ffplayout", "x": "(w-text_w)/2", "y": "(h-text_h)/2", "fontsize": "24", "line_spacing": "4", "fontcolor": "#ffffff", "box": "1", "boxcolor": "#000000", "boxborderw": "4", "alpha": "1.0"}'
```
**Control Playout**
- next
- back
- reset
```BASH
curl -X POST http://127.0.0.1:8787/api/control/1/playout/ -H 'Content-Type: application/json'
-d '{ "command": "reset" }' -H 'Authorization: Bearer <TOKEN>'
```
**Get current Clip**
```BASH
curl -X GET http://127.0.0.1:8787/api/control/1/media/current
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
```
**Response:**
```JSON
{
"media": {
"category": "",
"duration": 154.2,
"out": 154.2,
"in": 0.0,
"source": "/opt/tv-media/clip.mp4"
},
"index": 39,
"ingest": false,
"mode": "playlist",
"played": 67.808
}
```
#### ffplayout Process Control
Control ffplayout process, like:
- start
- stop
- restart
- status
```BASH
curl -X POST http://127.0.0.1:8787/api/control/1/process/
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
-d '{"command": "start"}'
```
#### ffplayout Playlist Operations
**Get playlist**
```BASH
curl -X GET http://127.0.0.1:8787/api/playlist/1?date=2022-06-20
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
```
**Save playlist**
```BASH
curl -X POST http://127.0.0.1:8787/api/playlist/1/
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
--data "{<JSON playlist data>}"
```
**Generate Playlist**
A new playlist will be generated and returned in the response.
```BASH
curl -X POST http://127.0.0.1:8787/api/playlist/1/generate/2022-06-20
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
/// --data '{ "paths": [<list of paths>] }' # <- data is optional
```
Or with template:
```BASH
curl -X POST http://127.0.0.1:8787/api/playlist/1/generate/2023-01-05
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
--data '{"template": {"sources": [\
{"start": "00:00:00", "duration": "10:00:00", "shuffle": true, "paths": ["path/1", "path/2"]}, \
{"start": "10:00:00", "duration": "14:00:00", "shuffle": false, "paths": ["path/3", "path/4"]}]}}'
```
**Delete Playlist**
```BASH
curl -X DELETE http://127.0.0.1:8787/api/playlist/1/2022-06-20
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
```
### Log file
**Read Log File**
```BASH
curl -X GET http://127.0.0.1:8787/api/log/1?date=2022-06-20
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
```
### File Operations
**Get File/Folder List**
```BASH
curl -X POST http://127.0.0.1:8787/api/file/1/browse/ -H 'Content-Type: application/json'
-d '{ "source": "/" }' -H 'Authorization: Bearer <TOKEN>'
```
**Create Folder**
```BASH
curl -X POST http://127.0.0.1:8787/api/file/1/create-folder/ -H 'Content-Type: application/json'
-d '{"source": "<FOLDER PATH>"}' -H 'Authorization: Bearer <TOKEN>'
```
**Rename File**
```BASH
curl -X POST http://127.0.0.1:8787/api/file/1/rename/ -H 'Content-Type: application/json'
-d '{"source": "<SOURCE>", "target": "<TARGET>"}' -H 'Authorization: Bearer <TOKEN>'
```
**Remove File/Folder**
```BASH
curl -X POST http://127.0.0.1:8787/api/file/1/remove/ -H 'Content-Type: application/json'
-d '{"source": "<SOURCE>"}' -H 'Authorization: Bearer <TOKEN>'
```
**Upload File**
```BASH
curl -X PUT http://127.0.0.1:8787/api/file/1/upload/ -H 'Authorization: Bearer <TOKEN>'
-F "file=@file.mp4"
```
**Get File**
Can be used for preview video files
```BASH
curl -X GET http://127.0.0.1:8787/file/1/path/to/file.mp4
```
**Get Public**
Can be used for HLS Playlist and other static files in public folder
```BASH
curl -X GET http://127.0.0.1:8787/live/stream.m3u8
```
**Import playlist**
Import text/m3u file and convert it to a playlist
lines with a leading "#" will be ignored
```BASH
curl -X PUT http://127.0.0.1:8787/api/file/1/import/ -H 'Authorization: Bearer <TOKEN>'
-F "file=@list.m3u"
```
**Program info**
Get program infos about given date, or current day
Examples:
* get program from current day
```BASH
curl -X GET http://127.0.0.1:8787/api/program/1/ -H 'Authorization: Bearer <TOKEN>'
```
* get a program range between two dates
```BASH
curl -X GET http://127.0.0.1:8787/api/program/1/?start_after=2022-11-13T12:00:00&start_before=2022-11-20T11:59:59 \
-H 'Authorization: Bearer <TOKEN>'
```
* get program from a given day
```BASH
curl -X GET http://127.0.0.1:8787/api/program/1/?start_after=2022-11-13T10:00:00 \
-H 'Authorization: Bearer <TOKEN>'
```
### System Statistics
Get statistics about CPU, Ram, Disk, etc. usage.
```BASH
curl -X GET http://127.0.0.1:8787/api/system/1
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
```

View File

@ -1,23 +0,0 @@
## Closed Captions
#### Note:
**This is only an _experimental feature_. Please be aware that bugs and unexpected behavior may occur. To utilize this feature, a version after 7.1 of FFmpeg is required. Importantly, there is currently no official support for this functionality.**
### Usage
**ffplayout** can handle closed captions in WebVTT format for HLS streaming.
The captions can be embedded in the file, such as in a [Matroska](https://www.matroska.org/technical/subtitles.html) file, or they can be a separate *.vtt file that shares the same filename as the video file. In either case, the processing option **vtt_enable** must be enabled, and the path to the **vtt_dummy** file must exist.
To encode the closed captions, the **hls** mode needs to be enabled, and specific output parameters must be provided. Here's an example:
```
-c:v libx264 -crf 23 -x264-params keyint=50:min-keyint=25:scenecut=-1 \
-maxrate 1300k -bufsize 2600k -preset faster -tune zerolatency \
-profile:v Main -level 3.1 -c:a aac -ar 44100 -b:a 128k -flags +cgop \
-muxpreload 0 -muxdelay 0 -f hls -hls_time 6 -hls_list_size 600 \
-hls_flags append_list+delete_segments+omit_endlist \
-var_stream_map v:0,a:0,s:0,sgroup:subs,sname:English,language:en-US,default:YES \
-master_pl_name master.m3u8 \
-hls_segment_filename \
live/stream-%d.ts live/stream.m3u8
```

View File

@ -1,121 +0,0 @@
## Custom filter
ffplayout allows the definition of a custom filter string. For this, the parameter **custom_filter** is available in the playout configuration under **processing**. The playlist can also contain a **custom_filter** parameter for each clip, with the same usage.
The filter outputs should end with `[c_v_out]` for video filters and `[c_a_out]` for audio filters. The filters will be applied to every clip and after the filters that unify the clips.
It is possible to apply only video filters, only audio filters, or both. For a better understanding, here are some examples:
#### Apply Gaussian blur and volume filter:
```YAML
custom_filter: "gblur=5[c_v_out];volume=0.5[c_a_out]"
```
#### Apply loudnorm filter:
```YAML
custom_filter: "loudnorm=I=-18:TP=-1.5:LRA=11[c_a_out]"
```
#### Add lower third:
```YAML
custom_filter: "[v_in];movie=/path/to/lower_third.png:loop=0,scale=1024:576,setpts=N/(25*TB)[lower];[v_in][lower]overlay=0:0:shortest=1[c_v_out]"
```
#### Overlay current time:
```YAML
custom_filter: "drawtext=text='%{localtime\:%H\\\:%M\\\:%S}':fontcolor=white:fontsize=40:x=w-tw-20:y=20:box=1:boxcolor=red@0.7:boxborderw=10[c_v_out]"
```
#### Scrolling text with static background:
```YAML
custom_filter: "drawbox=x=0:y=in_h-(in_h/6):w=in_w:h=60:t=fill:color=#000000@0x73,drawtext=text='Hello World':x='ifnot(ld(1),st(1,t));if(lt(t,ld(1)+1),w+4,w-w/12*mod(t-ld(1),12*(w+tw)/w))':y='main_h-(main_h/6)+20':fontsize=24:fontcolor=#f2f2f2"
```
Pay attention to the filter prefix `[v_in];`, this is necessary to get the output from the regular filters.
#### Paint effect
```YAML
custom_filter: "edgedetect=mode=colormix:high=0[c_v_out]"
```
### Where the filters applied in stream mode
The **custom filter** from **config -> processing** and from **playlist** got applied in the _decoder_ instance on every file:
```mermaid
flowchart LR
subgraph fileloop["file loop"]
direction LR
Input --> dec
subgraph filter["start new on file change"]
direction LR
dec["decoder / filtering / custom filter"]
end
end
subgraph fileout["constant output"]
direction LR
enc["encoder / text overlay"]
end
dec -- PIPE --> enc
enc --> output
```
#### When take which
* If you want to use a different filter chain for every clip, you should use the custom filter parameter from **playlist**.
* When you want to use the same filter for every clip you can use the custom filter from **config -> processing**.
### Complex example
This example takes an image and an animated mov clip with alpha and overlays them two times at different positions in time:
```YAML
custom_filter: "[v_in];movie=image_input.png:s=v,loop=loop=250.0:size=1:start=0,scale=1024:576,split=2[lower_1_out_1][lower_1_out_2];[lower_1_out_1]fifo,fade=in:duration=0.5:alpha=1,fade=out:start_time=9.5:duration=0.5:alpha=1,setpts=PTS+5.0/TB[fade_1];[v_in][fade_1]overlay=enable=between(t\,5.0\,15.0)[base_1];[lower_1_out_2]fifo,fade=in:duration=0.5:alpha=1,fade=out:start_time=9.5:duration=0.5:alpha=1,setpts=PTS+30.0/TB[fade_2];[base_1][fade_2]overlay=enable=between(t\,30.0\,40.0)[base_2];movie=animated_input.mov:s=v,scale=1024:576,split=2[lower_2_out_1][lower_2_out_2];[lower_2_out_1]fifo,setpts=PTS+20.0/TB[layer_1];[base_2][layer_1]overlay=repeatlast=0[base_3];[lower_2_out_2]fifo,setpts=PTS+50.0/TB[layer_2];[base_3][layer_2]overlay=repeatlast=0[c_v_out]"
```
And here are the explanation for each filter:
```PYTHON
# get input from video
[v_in];
# load the image, loops it for 10 seconds (25 FPS * 10), scale it to the target resolution, splits it into two outputs
movie=image_input.png:s=v,loop=loop=250.0:size=1:start=0,scale=1024:576,split=2[lower_1_out_1][lower_1_out_2];
# take output one from image, fades it in for 0.5 seconds, fades it out for 0.5 seconds, shift the start time to 00:00:05 (5 seconds)
[lower_1_out_1]fifo,fade=in:duration=0.5:alpha=1,fade=out:start_time=9.5:duration=0.5:alpha=1,setpts=PTS+5.0/TB[fade_1];
# overlay first output on top of the video, between second 5 and 15
[v_in][fade_1]overlay=enable=between(t\,5.0\,15.0)[base_1];
# take output two from image, fades it in for 0.5 seconds, fades it out for 0.5 seconds, shift the start time to 00:00:30 (30 seconds)
[lower_1_out_2]fifo,fade=in:duration=0.5:alpha=1,fade=out:start_time=9.5:duration=0.5:alpha=1,setpts=PTS+30.0/TB[fade_2];
# overlay second output on top of output from last overlay, between second 30 and 40
[base_1][fade_2]overlay=enable=between(t\,30.0\,40.0)[base_2];
# load the animated clip with alpha, scale it to the target resolution, splits it into two outputs
movie=animated_input.mov:s=v,scale=1024:576,split=2[lower_2_out_1][lower_2_out_2];
# shift the start from first animated clip to second 20
[lower_2_out_1]fifo,setpts=PTS+20.0/TB[layer_1];
# overlay the shifted animation on top of the last image overlay
[base_2][layer_1]overlay=repeatlast=0[base_3];
# shift the start from second animated clip to second 50
[lower_2_out_2]fifo,setpts=PTS+50.0/TB[layer_2];
# overlay the second shifted animation on top of the last overlay
[base_3][layer_2]overlay=repeatlast=0[c_v_out]
```
Check ffmpeg [filters](https://ffmpeg.org/ffmpeg-filters.html) documentation, and find out which other filters ffmpeg has and how to apply.

View File

@ -1,114 +0,0 @@
## Build ffplayout
For compiling, always use the newest Rust version; the best way is to install it from [rustup](https://rustup.rs/).
### Static Linking
Running `cargo build` ends up in a binary which depends on **libc.so**. But you can also compile the binary totally static:
- install musl compiler:
- `dnf install musl-gcc`
- add target:
- `rustup target add x86_64-unknown-linux-musl`
Compile with: `cargo build --release --target=x86_64-unknown-linux-musl`.
This release should run on any Linux distro.
**Note: You can also create a static version with Cross Toolchain. For this, follow the next steps.**
### Cross Compile
For cross compiling install docker or podman and latest [cross-rs](https://github.com/cross-rs/cross):
```
cargo install cross --git https://github.com/cross-rs/cross
```
To build for windows, run: `cross build --release --target x86_64-pc-windows-gnu`\
To build for linux aarch64: `cross build --release --target aarch64-unknown-linux-gnu`
Etc.
### Compile from Linux for macOS
Follow [cross-toolchains](https://github.com/cross-rs/cross-toolchains) instruction to add macOS support to cross.
I created my image with:
```
cargo build-docker-image x86_64-apple-darwin-cross \
--build-arg 'MACOS_SDK_URL=https://github.com/joseluisq/macosx-sdks/releases/download/12.3/MacOSX12.3.sdk.tar.xz'
```
Build then with:
```
cross build --release --target aarch64-apple-darwin
```
### Create debian DEB and RHEL RPM packages
install:
- `cargo install cargo-deb`
- `cargo install cargo-generate-rpm`
Compile to your target system with cargo or cross, and run:
```Bash
# for debian based systems:
cargo deb --no-build --target=x86_64-unknown-linux-musl
# for armhf
cargo deb --no-build --target=armv7-unknown-linux-gnueabihf --variant=armhf -p ffplayout --manifest-path=ffplayout-engine/Cargo.toml
# for arm64
cargo deb --no-build --target=aarch64-unknown-linux-gnu --variant=arm64 -p ffplayout --manifest-path=ffplayout-engine/Cargo.toml
# for rhel based systems:
cargo generate-rpm --target=x86_64-unknown-linux-musl
```
## Generate types for Frontend
The frontend uses TypeScript, to generate types for the rust structs run: `cargo test`.
The generated types are then in [types folder](/frontend/types).
## Setup Frontend
Make sure to install the dependencies:
```bash
# yarn
yarn install
# npm
npm install
# pnpm
pnpm install --shamefully-hoist
```
## Development Server
Start the development server on http://localhost:3000
```bash
npm run dev
```
## Production
Build the application for production:
```bash
npm run build
```
Locally preview production build:
```bash
npm run preview
```
Check out the [deployment documentation](https://nuxt.com/docs/getting-started/deployment) for more information.

View File

@ -0,0 +1,17 @@
[Unit]
Description=Supervisor process control system for UNIX
Documentation=http://supervisord.org
After=network.target
[Service]
ExecStart=/opt/ffplayout_engine/venv/bin/supervisord -n -c /etc/ffplayout/supervisor/supervisord.conf
ExecStop=/opt/ffplayout_engine/venv/bin/supervisorctl $OPTIONS shutdown
ExecReload=/opt/ffplayout_engine/venv/bin/supervisorctl $OPTIONS reload
KillMode=process
Restart=on-failure
RestartSec=5s
User=root
Group=root
[Install]
WantedBy=multi-user.target

View File

@ -0,0 +1,14 @@
[Unit]
Description=python and ffmpeg based playout
After=network.target
[Service]
ExecStart=/opt/ffplayout_engine/venv/bin/python /opt/ffplayout_engine/ffplayout.py
ExecReload=/bin/kill -1 $MAINPID
Restart=always
RestartSec=1
User=root
Group=root
[Install]
WantedBy=multi-user.target

View File

@ -1,9 +0,0 @@
### Folder Mode
ffplayout can play files from a folder; no playlists are required for this mode. This folder is monitored for changes, and when new files are added or deleted, they are registered and updated accordingly.
You just need to set `mode: folder` in the config under `processing:`, and under `storage:`, you have to specify the correct folder and the file extensions you want to scan for.
Additionally, there is a **shuffle** mode. If this is activated, the files will be played randomly.
If shuffle mode is off, the clips will be played in sorted order.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 60 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 68 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 306 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 33 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 180 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 47 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 173 KiB

View File

@ -1,33 +0,0 @@
In some situations, application closure may occur in conjunction with Live Ingest.
Here is an example in combination with SRS:
When a live stream is sent, it is forwarded to ffplayout, which then switches the TV program to the live stream.
Problems can occur if the internet connection for the live stream is not stable. In such cases, timeouts can occur, SRS breaks the connection to the playout, and the entire ffplayout process has to be restarted. The default timeout is 5000ms, or 5 seconds.
The timeout can be changed in SRS in the respective vhosts with:
```NGINX
publish {
normal_timeout 30000;
}
```
Here the new timeout would be 30 seconds.
The error behavior can be simulated under Linux using the tool **tc**. Then carry out the following steps:
- Start SRS
- start ffplayout, with rtmp target SRS
- after a few seconds start a livestream to ffplayout
- shortly afterwards start **tc**: `tc qdisc add dev eth0 root netem loss 70%`
- wait until the timeout time is exceeded
- ffplayout should close the process with encoder errors
- undo **tc** rule: `tc qdisc delete dev eth0 root`
`eth0` must be replaced with the physical network interface.
Reference:
- [simulate-delayed-and-dropped-packets-on-linux](https://stackoverflow.com/questions/614795/simulate-delayed-and-dropped-packets-on-linux)
- [publish-normal-timeout](https://ossrs.io/lts/en-us/docs/v4/doc/special-control/#publish-normal-timeout)

View File

@ -1,29 +0,0 @@
### Install ffplayout
**Note:** This is the official and supported way.
ffplayout provides ***.deb** and ***.rpm** packages, which makes it easier to install and use, but there are still some steps to follow.
1. Download the latest ffplayout from the [release](https://github.com/ffplayout/ffplayout/releases/latest) page and place the package in the **/tmp** folder
2. Install it with `apt install /tmp/ffplayout_<VERSION>_amd64.deb`
3. Install ffmpeg/ffprobe, or compile and copy them to **/usr/local/bin/**
4. Initialize the defaults and add a global admin user: `sudo -u ffpu ffplayout -i`
5. Use a reverse proxy for SSL; the port is **8787**
6. Log in with your browser. The address without a proxy would be: **http://[IP ADDRESS]:8787**
### Manual Install
**Note:** This is for advanced users only.
- Install ffmpeg/ffprobe, or compile and copy them to **/usr/local/bin/**
- Download the latest archive from the [release](https://github.com/ffplayout/ffplayout/releases/latest) page
- Copy the ffplayout binary to `/usr/bin/`
- Copy **assets/ffplayout.yml** to `/etc/ffplayout`
- Create the folder `/var/log/ffplayout`
- Create the system user **ffpu**
- Give ownership of `/etc/ffplayout` and `/var/log/ffplayout` to **ffpu**
- Copy **assets/ffplayout.service** to `/etc/systemd/system`
- Copy **assets/ffplayout.1.gz** to `/usr/share/man/man1/`
- Copy the **public** folder to `/usr/share/ffplayout/`
- Activate the service and run it: `systemctl enable --now ffplayout`
- Initialize the defaults and add a global admin user: `sudo -u ffpu ffplayout -i`

View File

@ -1,27 +0,0 @@
### Live Ingest
With live ingest, you have the possibility to switch from playlist or folder mode to a live stream.
It works by creating an ffmpeg instance in _listen_ (_server_) mode. For example, when streaming over RTMP, you can set the ingest input parameters to:
```
-f live_flv -listen 1 -i rtmp://0.0.0.0:1936/live/my-secrete-streaming-key
```
For SRT you could use:
```
-f mpegts -i 'srt://0.0.0.0:40077?mode=listener&passphrase=12345abcde'
```
Keep in mind that the ingest mode **can't** pull from a server; it can only act as its own server and listen for incoming streams.
When it detects an incoming stream, it will stop the currently playing content and switch to the live source. The output will not be interrupted, so you will have a continuous output stream.
In rare cases, it may happen that, for a short moment after switching, the image freezes, but then it will continue. Also, a brief frame flicker might occur.
You should know that **ffmpeg, in its current version, has no authentication mechanism and simply listens to the protocol and port (no app and stream name).**
ffplayout addresses this issue by monitoring the output from ffmpeg. When the input is **rtmp** and the app or stream name differs from the configuration, it stops the ingest process. So, in a way, we have some control over which streams are accepted and which are not.
In theory, you can use any [protocol](https://ffmpeg.org/ffmpeg-protocols.html) from ffmpeg that supports a **listen** mode.

View File

Before

Width:  |  Height:  |  Size: 6.6 KiB

After

Width:  |  Height:  |  Size: 6.6 KiB

View File

@ -1,73 +0,0 @@
## Multiple Audio Tracks
**\* This is an experimental feature and more intended for advanced users. Use it with caution!**
With _ffplayout_, you can output streams with multiple audio tracks, with some limitations:
* Not all formats support multiple audio tracks. For example, _flv/rtmp_ doesn't support it.
* In your output parameters, you need to set the correct mapping.
ffmpeg filter usage and encoding parameters can become very complex, so it may happen that not every combination works out of the box.
To get a better idea of what works, you can examine [engine_cmd](../tests/src/engine_cmd.rs).
If you are outputting a single video stream with multiple audio tracks, for example with the `srt://` protocol, you only need to set the correct `audio_tracks:` count in your config under `processing:`.
For multiple video resolutions and multiple audio tracks, the parameters could look like:
```YAML
out:
...
mode: stream
output_param: >-
-map 0:v
-map 0:a:0
-map 0:a:1
-c:v libx264
-c:a aac
-ar 44100
-b:a 128k
-flags +global_header
-f mpegts
srt://127.0.0.1:40051
-map 0:v
-map 0:a:0
-map 0:a:1
-s 512x288
-c:v libx264
-c:a aac
-ar 44100
-b:a 128k
-flags +global_header
-f mpegts
srt://127.0.0.1:40052
```
If you need HLS output with multiple resolutions and audio tracks, you can try something like:
```YAML
out:
...
mode: hls
output_param: >-
-filter_complex [0:v]split=2[v1_out][v2];[v2]scale=w=512:h=288[v2_out];[0:a:0]asplit=2[a_0_1][a_0_2];[0:a:1]asplit=2[a_1_1][a_1_2]
-map [v1_out]
-map [a_0_1]
-map [a_1_1]
-c:v libx264
-flags +cgop
-c:a aac
-map [v2_out]
-map [a_0_2]
-map [a_1_2]
-c:v:1 libx264
-flags +cgop
-c:a:1 aac
-f hls
-hls_time 6
-hls_list_size 600
-hls_flags append_list+delete_segments+omit_endlist
-hls_segment_filename /usr/share/ffplayout/public/live/stream_%v-%d.ts
-master_pl_name master.m3u8
-var_stream_map "v:0,a:0,a:1,name:720p v:1,a:2,a:3,name:288p"
/usr/share/ffplayout/public/live/stream_%v.m3u8
```

View File

@ -1,184 +0,0 @@
ffplayout supports different types of outputs, let's explain them a bit:
## Stream
The streaming output can be used for any kind of classical streaming, such as **rtmp, srt, rtp**, etc. Any streaming type supported by ffmpeg should work.
**Remember that you need a streaming server as a destination if you want to use this mode.**
For example, you can use:
- [SRS](https://github.com/ossrs/srs)
- [OvenMediaEngine](https://www.ovenmediaengine.com/ome)
- [Nginx-RTMP](https://www.digitalocean.com/community/tutorials/how-to-set-up-a-video-streaming-server-using-nginx-rtmp-on-ubuntu-20-04)
- [Ant-Media-Server](https://github.com/ant-media/Ant-Media-Server)
Of course, you can also use media platforms that support streaming input.
### Multiple Outputs:
ffplayout supports multiple outputs in such a way that it can send the same stream to multiple targets with different encoding settings.
For example, if you want to stream at different resolutions, you could apply these output parameters:
```YAML
...
output_param: >-
-c:v:0 libx264
-crf 23
-x264-params keyint=50:min-keyint=25:scenecut=-1
-maxrate:0 1300k
-bufsize:0 2600k
-preset faster
-tune zerolatency
-profile:v Main
-level 3.1
-c:a:0 aac
-ar:0 44100
-b:a:0 128k
-flags +global_header
-f flv rtmp://example.org/live/stream-high
-s 960x540
-c:v:1 libx264
-crf 23
-x264-params keyint=50:min-keyint=25:scenecut=-1
-maxrate:1 1000k
-bufsize:1 1800k
-preset faster
-tune zerolatency
-profile:v Main
-level 3.1
-c:a:1 aac
-ar:1 44100
-b:a:1 128k
-flags +global_header
-f flv rtmp://example.org/live/stream-low
```
When you are using the text overlay filter, it will apply to all outputs.
The same applies to HLS output.
If you want to use different resolutions, you should apply them in order from largest to smallest. Use the largest resolution in the config under `processing:` and the smaller ones in `output_params:`.
## Desktop
In desktop mode, you will get your picture on the screen. For this, you need a desktop system; theoretically, all platforms should work here. ffplayout will require **ffplay** for that.
## HLS
In this mode, you can output directly to an HLS playlist. The nice thing here is that ffplayout requires fewer resources than in streaming mode.
HLS output is currently the default, mostly because it works out of the box and doesn't need a streaming target. By default, it saves the segments to **/usr/share/ffplayout/public/live/**.
**It is recommended to serve the HLS stream with nginx or another web server, and not with ffplayout (which is more meant for previewing).**
**HLS multiple outputs example:**
```YAML
...
output_param: >-
-filter_complex [0:v]split=3[v1_out][v2][v3];[v2]scale=w=960:h=540[v2_out];[v3]scale=w=640:h=360[v3_out];[0:a]asplit=3[a1][a2][a3]
-map [v1_out]
-map [a1]
-c:v:0 libx264
-crf 23
-x264-params keyint=50:min-keyint=25:scenecut=-1
-maxrate:0 2000k
-bufsize:0 3200k
-preset faster
-tune zerolatency
-profile:v Main
-flags +cgop
-c:a:0 aac
-ar:0 44100
-b:a:0 128k
-map [v2_out]
-map [a2]
-c:v:1 libx264
-crf 23
-x264-params keyint=50:min-keyint=25:scenecut=-1
-maxrate:1 1100k
-bufsize:1 2200k
-preset faster
-tune zerolatency
-profile:v Main
-flags +cgop
-c:a:1 aac
-ar:1 44100
-b:a:1 96k
-map [v3_out]
-map [a3]
-c:v:2 libx264
-crf 23
-x264-params keyint=50:min-keyint=25:scenecut=-1
-maxrate:2 800k
-bufsize:2 1400k
-preset faster
-tune zerolatency
-profile:v Main
-flags +cgop
-c:a:2 aac
-ar:2 44100
-b:a:2 64k
-f hls
-hls_time 6
-hls_list_size 600
-hls_flags append_list+delete_segments+omit_endlist
-hls_segment_filename /var/www/html/live/stream_%v-%d.ts
-master_pl_name master.m3u8
-var_stream_map "v:0,a:0,name:720p v:1,a:1,name:540p v:2,a:2,name:360p"
/var/www/html/live/stream_%v.m3u8
```
The use of **-filter_complex** and *mapping* is very limited; don't use it in situations other than for splitting the outputs.
## Tee Muxer:
The tee pseudo-muxer in FFmpeg is crucial in live streaming scenarios where a single input needs to be encoded once and then broadcast to multiple outputs in different formats or protocols. This feature significantly reduces computational overhead and improves efficiency—in my tests, it achieved a 200% reduction in CPU processing expenditure—by eliminating the need for multiple FFmpeg instances or re-encoding the same input multiple times for different outputs.
**FFmpeg's Tee Pseudo-Muxer Parameter Configuration:**
The configuration of the tee pseudo-muxer in FFmpeg allows for the broadcasting of a single input to multiple outputs simultaneously, each with specific settings. This is accomplished by specifying distinct formats and protocols for each output within a single command line, thus minimizing computational load by avoiding re-encoding for each target.
### Parameters and Syntax:
```shell
-c:v libx264
-crf 23
-x264-params keyint=50:min-keyint=25:scenecut=-1
-maxrate 1300k
-bufsize 2600k
-preset faster
-tune zerolatency
-profile:v Main
-level 3.1
-c:a aac
-ar 44100
-b:a 128k
-flags +cgop
-flags +global_header
-f tee
[f=flv:onfail=ignore]rtmp://127.0.0.1:1935/798e3a9e-47b5-4cd5-8079-76a20e03fee6.stream|[f=mpegts:onfail=ignore]udp://127.0.0.1:1234?pkt_size=1316|[f=hls:hls_time=6:hls_list_size=600:hls_flags=append_list+delete_segments+omit_endlist:hls_segment_filename=/usr/share/ffplayout/public/live/stream-%d.ts]/usr/share/ffplayout/public/live/stream.m3u8
```
**1. `-f tee`**: Specifies the use of the tee pseudo-muxer, which facilitates the multiplexing of the broadcast.
**2. Use of “|” (pipe)**: The pipe symbol "|" acts as a separator between the different outputs within the tee command. Each segment separated by a pipe configures a distinct output for the broadcast.
**3. Stream Processing by the Tee**:
- **First Output**: `[f=flv:onfail=ignore]rtmp://127.0.0.1:1935/798e3a9e-47b5-4cd5-8079-76a20e03fee6.stream`
- **f=flv**: Sets the output format to FLV (Flash Video).
- **onfail=ignore**: Directs FFmpeg to continue operating even if this output fails.
- **Second Output**: `[f=mpegts:onfail=ignore]udp://127.0.0.1:1234?pkt_size=1316`
- **f=mpegts**: Sets the output format to MPEG-TS (MPEG Transport Stream).
- **udp://...**: Uses the UDP protocol to send the stream with a specified packet size (`pkt_size=1316`).
- **Third Output**: `[f=hls:hls_time=6:hls_list_size=600:hls_flags=append_list+delete_segments+omit_endlist:hls_segment_filename=/usr/share/ffplayout/public/live/stream-%d.ts]/usr/share/ffplayout/public/live/stream.m3u8`
- **f=hls**: Sets the output format to HLS (HTTP Live Streaming).
Each stream is processed by the tee pseudo-muxer, which encodes the input only once, directing it to various outputs as specified, thereby allowing for efficient and less resource-intensive operation.

View File

@ -1,69 +0,0 @@
## Playlist Generation Template
It is possible to generate playlists based on templates. A template could look like:
```JSON
{
"sources": [
{
"start": "00:00:00",
"duration": "02:00:00",
"shuffle": true,
"paths": [
"/path/to/folder/1"
]
},
{
"start": "02:00:00",
"duration": "04:00:00",
"shuffle": false,
"paths": [
"/path/to/folder/2",
"/path/to/folder/3",
"/path/to/folder/4"
]
},
{
"start": "06:00:00",
"duration": "10:00:00",
"shuffle": true,
"paths": [
"/path/to/folder/5"
]
},
{
"start": "16:00:00",
"duration": "06:00:00",
"shuffle": false,
"paths": [
"/path/to/folder/6",
"/path/to/folder/7"
]
},
{
"start": "22:00:00",
"duration": "02:00:00",
"shuffle": true,
"paths": [
"/path/to/folder/8"
]
}
]
}
```
This can be used as file and run through CLI:
```BASH
ffplayout -g 2023-09-04 - 2023-09-10 --template 'path/to/playlist_template.json'
```
Or through API:
```BASH
curl -X POST http://127.0.0.1:8787/api/playlist/1/generate/2023-01-05
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
--data '{"template": {"sources": [\
{"start": "00:00:00", "duration": "10:00:00", "shuffle": true, "paths": ["path/1", "path/2"]}, \
{"start": "10:00:00", "duration": "14:00:00", "shuffle": false, "paths": ["path/3", "path/4"]}]}}'
```

View File

@ -1,201 +0,0 @@
### Preview Stream
When you are using the web frontend, you may wonder how to get a preview in the player. The default installation creates an HLS playlist, and the player uses this, but the HLS mode is not always utilized; instead, the stream output mode is activated.
So if you stream to an external server, you have different options to get a preview stream for your player. The simplest option would be to obtain an m3u8 playlist address from your external target, such as: https://example.org/live/stream.m3u8. You can use this in the configuration section of the frontend.
Another option (which has not been tested) is to add an HLS output option to your streaming parameters.
The next option is to install an RTMP server locally and create your preview stream there. In the following lines, this is described in more detail.
The ffplayout engine has no special preview config parameters, but you can add your settings to the **output_param**, like:
```YAML
-s 512x288
-c:v libx264
-crf 24
-x264-params keyint=50:min-keyint=25:scenecut=-1
-maxrate 800k
-bufsize 1600k
-preset ultrafast
-tune zerolatency
-profile:v Main
-level 3.1
-c:a aac
-ar 44100
-b:a 128k
-flags +global_header
-f flv rtmp://127.0.0.1/live/stream
...
```
In this documentation, we assume that you are using [SRS](https://github.com/ossrs/srs) at least for the preview stream. The most stable solution is previewing over HLS, but it is also possible to use [HTTP-FLV](https://github.com/ossrs/srs/wiki/v4_EN_DeliveryHttpStream) for lower latency.
To get this working, we need to follow some steps.
#### The first step is to compile and install SRS:
```BASH
# install some tool for compiling
apt install curl wget net-tools git build-essential autoconf automake libtool pkg-config gperf libssl-dev
cd /opt/
# get SRS
git clone https://github.com/ossrs/srs.git
cd srs/trunk
# get correct branch
git checkout 4.0release
./configure --ffmpeg-fit=off
make -j4
# install SRS to /usr/local/srs
make install
```
Now we need a systemd service to start SRS automatically. Create the file:
**/etc/systemd/system/srs.service**
with this content:
```INI
[Unit]
Description=SRS
Documentation=https://github.com/ossrs/srs/wiki
After=network.target
[Service]
Type=forking
ExecStartPre=/usr/local/srs/objs/srs -t -c /etc/srs/srs.conf
ExecStart=/usr/local/srs/objs/srs -c /etc/srs/srs.conf
ExecStop=/bin/kill -TERM $MAINPID
ExecReload=/bin/kill -1 $MAINPID
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target
```
Then create the config for SRS under **/etc/srs/srs.conf** with this content:
```NGINX
listen 1935;
max_connections 20;
daemon on;
pid /usr/local/srs/objs/srs.pid;
srs_log_tank console; # file;
srs_log_file /var/log/srs.log;
ff_log_dir /tmp;
srs_log_level error;
http_server {
enabled on;
listen 127.0.0.1:8080;
dir ./objs/nginx/html;
}
stats {
network 0;
disk sda vda xvda xvdb;
}
# for normal HLS streaming
vhost __defaultVhost__ {
enabled on;
play {
mix_correct on;
}
# switch enable off, for hls preview
http_remux {
enabled on;
mount [vhost]/[app]/[stream].flv;
}
# switch enable off, for http-flv preview
hls {
enabled on;
hls_path /var/www/srs;
hls_fragment 6;
hls_window 3600;
hls_cleanup on;
hls_dispose 0;
hls_m3u8_file live/stream.m3u8;
hls_ts_file live/stream-[seq].ts;
}
}
```
Now you can enable and start SRS with: `systemctl enable --now srs` and check if it is running: `systemctl status srs`.
#### Configure Nginx
We assume that you have already installed Nginx and are using it for the frontend. Open the frontend config **/etc/nginx/sites-enabled/ffplayout.conf** and add a new location to it:
```NGINX
location /live/stream.flv {
proxy_pass http://127.0.0.1:8080/live/stream.flv;
}
```
Full config looks like:
```NGINX
server {
listen 80;
server_name ffplayout.example.org;
gzip on;
gzip_types text/plain application/xml text/css application/javascript;
gzip_min_length 1000;
charset utf-8;
client_max_body_size 7000M; # should be desirable value
add_header X-Frame-Options SAMEORIGIN;
add_header X-Content-Type-Options nosniff;
add_header X-XSS-Protection "1; mode=block";
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains; preload" always;
location / {
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_read_timeout 36000s;
proxy_connect_timeout 36000s;
proxy_send_timeout 36000s;
proxy_buffer_size 128k;
proxy_buffers 4 256k;
proxy_busy_buffers_size 256k;
send_timeout 36000s;
proxy_pass http://127.0.0.1:8787;
}
location /live/ {
alias /var/www/srs/live/;
}
location /live/stream.flv {
proxy_pass http://127.0.0.1:8080/live/stream.flv;
}
}
```
Of course, in production, you should have an HTTPS directive as well, but this step is up to you.
Restart Nginx.
You can (re)start ffplayout, and when you have set everything up correctly, it should run without errors.
You can now go to your frontend configuration and change the `player_url` to: `http://[domain or IP]/live/stream.flv` or `http://[domain or IP]/live/stream.m3u8`. Save and reload the page. When you go to the player tab, you should see the preview video.

View File

@ -1,18 +0,0 @@
### Video from URL
Videos from a URL are videos that you can watch directly in your browser or download. For example:
```json
{
"in": 0,
"out": 149,
"duration": 149,
"source": "https://example.org/big_buck_bunny.webm"
}
```
This should work in general because most of the time it has duration information and is faster to play than a real live stream source. Avoid seeking, as it can take too much time.
**Live streams as input in playlists, such as RTMP, are not supported.**
Be careful with this; it's better to test it multiple times!

View File

@ -1,10 +0,0 @@
### Stream Copy
ffplayout supports a stream copy mode. Video and audio can be copied independently. This mode uses less CPU and RAM but has some drawbacks:
- All files must have exactly the same resolution, framerate, color depth, audio channels, and kHz.
- All files must use the same codecs and settings.
- The video and audio lines of a file must be the same length.
- The codecs and A/V settings must be supported by MPEG-TS and the output destination.
**This mode is experimental and will not have the same stability as the stream mode.**

11
docs/supervisor/README.md Normal file
View File

@ -0,0 +1,11 @@
SupervisorD
-----
The supervisor config is only needed when you want to run multiple channels.
Every channel has its own config in the [conf.d](/supervisor/conf.d/) folder. In the configuration you have to change this line:
```
command=./venv/bin/python3 ffplayout.py -c /etc/ffplayout/ffplayout-001.yml
```
so that it points to the correct ffplayout YAML config file.

View File

@ -0,0 +1,11 @@
[program:engine-001]
directory=/opt/ffplayout_engine
command=/opt/ffplayout_engine/venv/bin/python ffplayout.py -c /etc/ffplayout/ffplayout-001.yml
redirect_stderr=true
stdout_logfile=/var/log/ffplayout/engine-001.log
killasgroup=true
stopasgroup=true
autorestart=true
autostart=true
startsecs=2
startretries=10

View File

@ -0,0 +1,19 @@
[supervisord]
pidfile=/tmp/supervisord.pid
nodaemon=true
logfile=/dev/null
logfile_maxbytes=0
[include]
files = conf.d/*.conf
[inet_http_server]
port=127.0.0.1:9001
username = ffplayout
password = hsF0wQkl5zopEy1mBlT3g
[supervisorctl]
serverurl=http://127.0.0.1:9001
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface

View File

@ -1,204 +0,0 @@
[package]
name = "ffplayout"
description.workspace = true
readme.workspace = true
version.workspace = true
license.workspace = true
authors.workspace = true
repository.workspace = true
edition.workspace = true
[features]
default = ["embed_frontend"]
embed_frontend = []
[dependencies]
actix-files = "0.6"
actix-multipart = "0.7"
actix-web = "4"
actix-web-grants = "4"
actix-web-httpauth = "0.8"
actix-web-lab = "0.23"
actix-web-static-files = "4.0"
argon2 = "0.5"
chrono = { version = "0.4", default-features = false, features = ["clock", "std", "serde"] }
clap = { version = "4.3", features = ["derive", "env"] }
derive_more = { version = "1", features = ["display"] }
faccess = "0.2"
ffprobe = "0.4"
flexi_logger = { version = "0.29", features = ["kv", "colors"] }
futures-util = { version = "0.3", default-features = false, features = ["std"] }
jsonwebtoken = "9"
lazy_static = "1.4"
lettre = { version = "0.11", features = ["builder", "rustls-tls", "smtp-transport", "tokio1", "tokio1-rustls-tls"], default-features = false }
lexical-sort = "0.3"
local-ip-address = "0.6"
log = { version = "0.4", features = ["std", "serde", "kv", "kv_std", "kv_sval", "kv_serde"] }
m3u8-rs = "6"
nix = { version = "0.29", features = ["user", "fs"] }
notify = "7.0"
notify-debouncer-full = { version = "*", default-features = false }
num-traits = "0.2"
once_cell = "1"
paris = "1.5"
parking_lot = "0.12"
path-clean = "1.0"
rand = "0.8"
regex = "1"
relative-path = "1.8"
reqwest = { version = "0.12", default-features = false, features = ["blocking", "json", "rustls-tls"] }
rpassword = "7.2"
sanitize-filename = "0.5"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde_with = "3.8"
shlex = "1.1"
static-files = "0.2"
sysinfo ={ version = "0.32", features = ["linux-netdevs", "linux-tmpfs"] }
sqlx = { version = "0.8", features = ["runtime-tokio", "sqlite"] }
time = { version = "0.3", features = ["formatting", "macros"] }
tokio = { version = "1.29", features = ["full"] }
tokio-stream = "0.1"
toml_edit = {version = "0.22", features = ["serde"]}
ts-rs = { version = "10", features = ["chrono-impl", "no-serde-warnings"] }
uuid = "1.8"
walkdir = "2"
zeromq = { version = "0.4", default-features = false, features = [
"tokio-runtime",
"tcp-transport",
] }
[build-dependencies]
static-files = "0.2"
[[bin]]
name = "ffplayout"
path = "src/main.rs"
# DEBIAN DEB PACKAGE
[package.metadata.deb]
name = "ffplayout"
priority = "optional"
section = "net"
license-file = ["../LICENSE", "0"]
depends = ""
suggests = "ffmpeg"
copyright = "Copyright (c) 2024, Jonathan Baecker. All rights reserved."
assets = [
[
"../target/x86_64-unknown-linux-musl/release/ffplayout",
"/usr/bin/",
"755",
],
[
"../assets/ffplayout.service",
"/lib/systemd/system/",
"644",
],
[
"../assets/dummy.vtt",
"/usr/share/ffplayout/",
"644",
],
[
"../assets/DejaVuSans.ttf",
"/usr/share/ffplayout/",
"644",
],
[
"../assets/FONT_LICENSE.txt",
"/usr/share/ffplayout/",
"644",
],
[
"../assets/logo.png",
"/usr/share/ffplayout/",
"644",
],
[
"../assets/ffplayout.conf",
"/usr/share/ffplayout/ffplayout.conf.example",
"644",
],
[
"../README.md",
"/usr/share/doc/ffplayout/README",
"644",
],
[
"../assets/ffplayout.1.gz",
"/usr/share/man/man1/",
"644",
],
]
maintainer-scripts = "../debian/"
systemd-units = { enable = true, unit-scripts = "../assets" }
[package.metadata.deb.variants.arm64]
assets = [
[
"../target/aarch64-unknown-linux-gnu/release/ffplayout",
"/usr/bin/",
"755",
],
[
"../assets/ffplayout.service",
"/lib/systemd/system/",
"644",
],
[
"../assets/dummy.vtt",
"/usr/share/ffplayout/",
"644",
],
[
"../assets/DejaVuSans.ttf",
"/usr/share/ffplayout/",
"644",
],
[
"../assets/FONT_LICENSE.txt",
"/usr/share/ffplayout/",
"644",
],
[
"../assets/logo.png",
"/usr/share/ffplayout/",
"644",
],
[
"../assets/ffplayout.conf",
"/usr/share/ffplayout/ffplayout.conf.example",
"644",
],
[
"../README.md",
"/usr/share/doc/ffplayout/README",
"644",
],
[
"../assets/ffplayout.1.gz",
"/usr/share/man/man1/",
"644",
],
]
# RHEL RPM PACKAGE
[package.metadata.generate-rpm]
name = "ffplayout"
license = "GPL-3.0"
assets = [
{ source = "../target/x86_64-unknown-linux-musl/release/ffplayout", dest = "/usr/bin/ffplayout", mode = "755" },
{ source = "../assets/ffplayout.service", dest = "/lib/systemd/system/ffplayout.service", mode = "644" },
{ source = "../README.md", dest = "/usr/share/doc/ffplayout/README", mode = "644" },
{ source = "../assets/ffplayout.1.gz", dest = "/usr/share/man/man1/ffplayout.1.gz", mode = "644", doc = true },
{ source = "../LICENSE", dest = "/usr/share/doc/ffplayout/LICENSE", mode = "644" },
{ source = "../assets/dummy.vtt", dest = "/usr/share/ffplayout/dummy.vtt", mode = "644" },
{ source = "../assets/DejaVuSans.ttf", dest = "/usr/share/ffplayout/DejaVuSans.ttf", mode = "644" },
{ source = "../assets/FONT_LICENSE.txt", dest = "/usr/share/ffplayout/FONT_LICENSE.txt", mode = "644" },
{ source = "../assets/logo.png", dest = "/usr/share/ffplayout/logo.png", mode = "644" },
{ source = "../assets/ffplayout.conf", dest = "/usr/share/ffplayout/ffplayout.conf.example", mode = "644" },
{ source = "../debian/postinst", dest = "/usr/share/ffplayout/postinst", mode = "755" },
]
auto-req = "no"
post_install_script = "/usr/share/ffplayout/postinst"

View File

@ -1,15 +0,0 @@
use static_files::NpmBuild;
/// Build script: generates and embeds the web frontend as static resources.
///
/// The npm step only runs for release builds that enable the
/// `embed_frontend` feature; every other configuration is a no-op.
fn main() -> std::io::Result<()> {
    // Skip the expensive npm install/generate in debug or non-embedding builds.
    if cfg!(debug_assertions) || !cfg!(feature = "embed_frontend") {
        return Ok(());
    }

    let built = NpmBuild::new("../frontend").install()?.run("generate")?;

    built
        .target("../frontend/.output/public")
        .change_detection()
        .to_resource_dir()
        .build()
}

View File

@ -1,60 +0,0 @@
use log::*;
use std::io::Write;
use flexi_logger::writers::{FileLogWriter, LogWriter};
use flexi_logger::{Age, Cleanup, Criterion, DeferredNow, FileSpec, Logger, Naming};
/// Build a boxed file log writer for the `Alert` target.
///
/// Writes to `./logs/ffplayout_1.log` (basename + discriminant, no
/// timestamp in the file name), appends to an existing file, rotates
/// daily with a `YYYY-MM-DD` infix and keeps the last 4 rotated files.
///
/// # Panics
/// Panics if the writer cannot be built (e.g. `./logs` is not writable).
pub fn file_logger() -> Box<dyn LogWriter> {
    Box::new(
        FileLogWriter::builder(
            FileSpec::default()
                .suppress_timestamp()
                .directory("./logs")
                .discriminant("1")
                .basename("ffplayout"),
        )
        .append()
        .format(file_formatter)
        .rotate(
            Criterion::Age(Age::Day),
            Naming::TimestampsCustomFormat {
                // Empty infix: the current file has no date in its name.
                current_infix: Some(""),
                format: "%Y-%m-%d",
            },
            Cleanup::KeepLogFiles(4),
        )
        .print_message()
        .try_build()
        .unwrap(),
    )
}
/// Render one log record as `[timestamp] [LEVEL] message` (level padded
/// right-aligned to 5 characters, microsecond-precision timestamp).
fn file_formatter(
    w: &mut dyn Write,
    now: &mut DeferredNow,
    record: &Record,
) -> std::io::Result<()> {
    let timestamp = now.now().format("%Y-%m-%d %H:%M:%S%.6f");

    write!(w, "[{timestamp}] [{:>5}] {}", record.level(), record.args())
}
/// Example entry point: log to stderr at WARN+ and additionally route
/// every message to the "Alert" file writer via the `{Alert,_Default}`
/// target syntax (writer name plus the default console sink).
fn main() {
    Logger::try_with_str("WARN")
        .expect("LogSpecification String has errors")
        .print_message()
        .log_to_stderr()
        .add_writer("Alert", file_logger())
        .start()
        .unwrap();

    // Each macro writes to the file writer AND the default sink; debug/trace
    // are filtered out by the WARN level above.
    error!(target : "{Alert,_Default}", "This is error message");
    warn!(target : "{Alert,_Default}", "This is a warning");
    info!(target : "{Alert,_Default}", "This is an info message");
    debug!(target : "{Alert,_Default}", "This is an debug message");
    trace!(target : "{Alert,_Default}", "This is an trace message");
}

View File

@ -1,85 +0,0 @@
use flexi_logger::writers::{FileLogWriter, LogWriter};
use flexi_logger::{Age, Cleanup, Criterion, DeferredNow, FileSpec, Naming, Record};
use log::{debug, error, info, kv::Value, trace, warn};
use std::collections::HashMap;
use std::io;
use std::sync::{Arc, Mutex};
/// Demultiplexes log records to per-channel files.
struct MultiFileLogger {
    // Map of channel name -> lazily created file writer; double-wrapped so
    // the map itself and each writer can be locked independently.
    writers: Arc<Mutex<HashMap<String, Arc<Mutex<FileLogWriter>>>>>,
}
impl MultiFileLogger {
    /// Create a logger with no per-channel writers yet; writers are built
    /// lazily on first use in [`Self::get_writer`].
    pub fn new() -> Self {
        MultiFileLogger {
            writers: Arc::new(Mutex::new(HashMap::new())),
        }
    }

    /// Return the rotating file writer for `channel`, creating and caching
    /// it on first request.
    ///
    /// # Errors
    /// Returns an `io::Error` when the writer cannot be built (wrapping the
    /// flexi_logger error message).
    fn get_writer(&self, channel: &str) -> io::Result<Arc<Mutex<FileLogWriter>>> {
        let mut writers = self.writers.lock().unwrap();

        // Fast path: one map lookup instead of the previous
        // contains_key / insert / get triple lookup.
        if let Some(writer) = writers.get(channel) {
            return Ok(writer.clone());
        }

        let writer = FileLogWriter::builder(
            FileSpec::default()
                .suppress_timestamp()
                .basename("ffplayout"),
        )
        .append()
        .rotate(
            Criterion::Age(Age::Day),
            Naming::TimestampsCustomFormat {
                current_infix: Some(""),
                format: "%Y-%m-%d",
            },
            Cleanup::KeepLogFiles(7),
        )
        .print_message()
        .try_build()
        .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;

        let writer = Arc::new(Mutex::new(writer));
        writers.insert(channel.to_string(), writer.clone());

        Ok(writer)
    }
}
impl LogWriter for MultiFileLogger {
    /// Route one record to the file writer selected by its `channel`
    /// key-value (missing key falls back to the string "null").
    fn write(&self, now: &mut DeferredNow, record: &Record) -> io::Result<()> {
        let channel = record
            .key_values()
            .get("channel".into())
            .unwrap_or(Value::null())
            .to_string();

        self.get_writer(&channel)?
            .lock()
            .unwrap()
            .write(now, record)
    }

    /// Flush every channel writer that has been created so far.
    fn flush(&self) -> io::Result<()> {
        for writer in self.writers.lock().unwrap().values() {
            writer.lock().unwrap().flush()?;
        }

        Ok(())
    }
}
/// Example entry point: console logging plus a "file" writer that splits
/// records into per-channel files via the `channel = N` key-value.
fn main() {
    let logger = MultiFileLogger::new();

    flexi_logger::Logger::try_with_str("trace")
        .expect("LogSpecification String has errors")
        .print_message()
        .add_writer("file", Box::new(logger))
        .log_to_stderr()
        .unwrap();
    // Messages with target "{file}" go to the channel writer; the plain
    // macros below go to the console sink only.
    trace!(target: "{file}", channel = 1; "This is a trace message for file1");
    trace!("This is a trace message for console");
    debug!(target: "{file}", channel = 2; "This is a debug message for file2");
    info!(target:"{file}", channel = 2; "This is an info message for file2");
    warn!(target: "{file}", channel = 1; "This is a warning for file1");
    error!(target: "{file}", channel = 2; "This is an error message for file2");
    info!("This is a info message for console");
}

View File

@ -1,54 +0,0 @@
use actix_web::error::ErrorUnauthorized;
use actix_web::Error;
use chrono::{TimeDelta, Utc};
use jsonwebtoken::{self, DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};
use crate::{
db::models::{GlobalSettings, Role},
utils::errors::ServiceError,
};
// Token lifetime
const JWT_EXPIRATION_DAYS: i64 = 7;
/// JWT payload: the authenticated user's identity, channel access list,
/// role, and the expiry timestamp checked by `Validation::default()`.
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
pub struct Claims {
    pub id: i32,
    // Channels this user may access.
    pub channels: Vec<i32>,
    pub username: String,
    pub role: Role,
    // Unix timestamp (seconds) at which the token expires.
    exp: i64,
}
impl Claims {
    /// Build a claim set for the given user, expiring
    /// `JWT_EXPIRATION_DAYS` from now.
    pub fn new(id: i32, channels: Vec<i32>, username: String, role: Role) -> Self {
        let exp = (Utc::now() + TimeDelta::try_days(JWT_EXPIRATION_DAYS).unwrap()).timestamp();

        Self {
            id,
            channels,
            username,
            role,
            exp,
        }
    }
}
/// Create a json web token (JWT) signed with the application secret.
///
/// # Panics
/// Panics when the global secret has not been initialized.
pub async fn create_jwt(claims: Claims) -> Result<String, ServiceError> {
    let secret = GlobalSettings::global().secret.clone().unwrap();
    let token = jsonwebtoken::encode(
        &Header::default(),
        &claims,
        &EncodingKey::from_secret(secret.as_bytes()),
    )?;

    Ok(token)
}
/// Decode a json web token (JWT) and return its claims; any validation
/// failure is mapped to a 401 Unauthorized error.
pub async fn decode_jwt(token: &str) -> Result<Claims, Error> {
    let secret = GlobalSettings::global().secret.clone().unwrap();
    let key = DecodingKey::from_secret(secret.as_bytes());

    match jsonwebtoken::decode::<Claims>(token, &key, &Validation::default()) {
        Ok(data) => Ok(data.claims),
        Err(e) => Err(ErrorUnauthorized(e.to_string())),
    }
}

View File

@ -1,2 +0,0 @@
pub mod auth;
pub mod routes;

File diff suppressed because it is too large Load Diff

View File

@ -1,552 +0,0 @@
use argon2::{
password_hash::{rand_core::OsRng, SaltString},
Argon2, PasswordHasher,
};
use rand::{distributions::Alphanumeric, Rng};
use sqlx::{sqlite::SqliteQueryResult, Pool, Row, Sqlite};
use tokio::task;
use super::models::{AdvancedConfiguration, Configuration};
use crate::db::models::{Channel, GlobalSettings, Role, TextPreset, User};
use crate::utils::{
advanced_config::AdvancedConfig, config::PlayoutConfig, is_running_in_container,
local_utc_offset,
};
/// Run pending SQL migrations and, on a fresh database, seed the single
/// `global` row with a random secret plus a trigger that forbids inserting
/// a second row.
pub async fn db_migrate(conn: &Pool<Sqlite>) -> Result<(), Box<dyn std::error::Error>> {
    sqlx::migrate!("../migrations").run(conn).await?;
    // A failed select means the settings row does not exist yet.
    if select_global(conn).await.is_err() {
        // 80-char alphanumeric secret, later used for JWT signing.
        let secret: String = rand::thread_rng()
            .sample_iter(&Alphanumeric)
            .take(80)
            .map(char::from)
            .collect();
        let shared = is_running_in_container().await;
        let query = "CREATE TRIGGER global_row_count
BEFORE INSERT ON global
WHEN (SELECT COUNT(*) FROM global) >= 1
BEGIN
SELECT RAISE(FAIL, 'Database is already initialized!');
END;
INSERT INTO global(secret, shared) VALUES($1, $2);";
        sqlx::query(query)
            .bind(secret)
            .bind(shared)
            .execute(conn)
            .await?;
    }
    Ok(())
}
/// Load the singleton settings row (id = 1) from the `global` table.
pub async fn select_global(conn: &Pool<Sqlite>) -> Result<GlobalSettings, sqlx::Error> {
    sqlx::query_as(
        "SELECT id, secret, logs, playlists, public, storage, shared, mail_smtp, mail_user, mail_password, mail_starttls FROM global WHERE id = 1",
    )
    .fetch_one(conn)
    .await
}
/// Persist the mutable global settings fields back to the single row.
pub async fn update_global(
    conn: &Pool<Sqlite>,
    global: GlobalSettings,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = "UPDATE global SET logs = $2, playlists = $3, public = $4, storage = $5,
    mail_smtp = $6, mail_user = $7, mail_password = $8, mail_starttls = $9 WHERE id = 1";
    // NOTE(review): `global.id` binds to $1, which the statement never
    // references (the row is addressed by the literal `id = 1`). SQLite
    // still reserves parameter index 1 because $2..$9 exist, so the bind
    // is harmless filler that keeps the bind order aligned.
    sqlx::query(query)
        .bind(global.id)
        .bind(global.logs)
        .bind(global.playlists)
        .bind(global.public)
        .bind(global.storage)
        .bind(global.mail_smtp)
        .bind(global.mail_user)
        .bind(global.mail_password)
        .bind(global.mail_starttls)
        .execute(conn)
        .await
}
/// Fetch one channel by id, patching in the server's current UTC offset
/// (which is runtime information, not stored in the DB).
pub async fn select_channel(conn: &Pool<Sqlite>, id: &i32) -> Result<Channel, sqlx::Error> {
    let mut channel: Channel = sqlx::query_as("SELECT * FROM channels WHERE id = $1")
        .bind(id)
        .fetch_one(conn)
        .await?;
    channel.utc_offset = local_utc_offset();

    Ok(channel)
}
/// List channels: all of them when `user_id` is `None`, otherwise only
/// those linked to the user through `user_channels`. The current UTC
/// offset is patched into every row.
pub async fn select_related_channels(
    conn: &Pool<Sqlite>,
    user_id: Option<i32>,
) -> Result<Vec<Channel>, sqlx::Error> {
    let query = match user_id {
        // NOTE(review): the id is interpolated with format! instead of a
        // bind parameter; safe only because it is a typed i32 (cannot carry
        // SQL), but a `$1` bind would be the cleaner idiom.
        Some(id) => format!(
            "SELECT c.id, c.name, c.preview_url, c.extra_extensions, c.active, c.public, c.playlists, c.storage, c.last_date, c.time_shift FROM channels c
            left join user_channels uc on uc.channel_id = c.id
            left join user u on u.id = uc.user_id
            WHERE u.id = {id} ORDER BY c.id ASC;"
        ),
        None => "SELECT * FROM channels ORDER BY id ASC;".to_string(),
    };
    let mut results: Vec<Channel> = sqlx::query_as(&query).fetch_all(conn).await?;
    for result in results.iter_mut() {
        result.utc_offset = local_utc_offset();
    }
    Ok(results)
}
/// Remove one user-to-channel link from the join table.
pub async fn delete_user_channel(
    conn: &Pool<Sqlite>,
    user_id: i32,
    channel_id: i32,
) -> Result<SqliteQueryResult, sqlx::Error> {
    sqlx::query("DELETE FROM user_channels WHERE user_id = $1 AND channel_id = $2")
        .bind(user_id)
        .bind(channel_id)
        .execute(conn)
        .await
}
/// Update the user-editable channel fields; `active`, `last_date` and
/// `time_shift` are managed separately by `update_player` / `update_stat`.
pub async fn update_channel(
    conn: &Pool<Sqlite>,
    id: i32,
    channel: Channel,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query =
        "UPDATE channels SET name = $2, preview_url = $3, extra_extensions = $4, public = $5, playlists = $6, storage = $7 WHERE id = $1";
    sqlx::query(query)
        .bind(id)
        .bind(channel.name)
        .bind(channel.preview_url)
        .bind(channel.extra_extensions)
        .bind(channel.public)
        .bind(channel.playlists)
        .bind(channel.storage)
        .execute(conn)
        .await
}
/// Persist playout progress (last processed date and accumulated time
/// shift) for one channel.
pub async fn update_stat(
    conn: &Pool<Sqlite>,
    id: i32,
    last_date: String,
    time_shift: f64,
) -> Result<SqliteQueryResult, sqlx::Error> {
    sqlx::query("UPDATE channels SET last_date = $2, time_shift = $3 WHERE id = $1")
        .bind(id)
        .bind(last_date)
        .bind(time_shift)
        .execute(conn)
        .await
}
/// Toggle a channel's active (playing) flag.
pub async fn update_player(
    conn: &Pool<Sqlite>,
    id: i32,
    active: bool,
) -> Result<SqliteQueryResult, sqlx::Error> {
    sqlx::query("UPDATE channels SET active = $2 WHERE id = $1")
        .bind(id)
        .bind(active)
        .execute(conn)
        .await
}
/// Insert a new channel and return the freshly created row, re-selected
/// via `last_insert_rowid()` so DB-side defaults are included.
pub async fn insert_channel(conn: &Pool<Sqlite>, channel: Channel) -> Result<Channel, sqlx::Error> {
    let query = "INSERT INTO channels (name, preview_url, extra_extensions, public, playlists, storage) VALUES($1, $2, $3, $4, $5, $6)";
    let result = sqlx::query(query)
        .bind(channel.name)
        .bind(channel.preview_url)
        .bind(channel.extra_extensions)
        .bind(channel.public)
        .bind(channel.playlists)
        .bind(channel.storage)
        .execute(conn)
        .await?;
    sqlx::query_as("SELECT * FROM channels WHERE id = $1")
        .bind(result.last_insert_rowid())
        .fetch_one(conn)
        .await
}
/// Delete a channel row by id.
pub async fn delete_channel(
    conn: &Pool<Sqlite>,
    id: &i32,
) -> Result<SqliteQueryResult, sqlx::Error> {
    sqlx::query("DELETE FROM channels WHERE id = $1")
        .bind(id)
        .execute(conn)
        .await
}
/// Fetch the most recently assigned channel id from SQLite's internal
/// AUTOINCREMENT bookkeeping table.
pub async fn select_last_channel(conn: &Pool<Sqlite>) -> Result<i32, sqlx::Error> {
    // The sqlite_sequence row is keyed by the table name, which is
    // `channels` (plural) everywhere else in this module; the previous
    // literal 'channel' could never match and always yielded RowNotFound.
    let query = "select seq from sqlite_sequence WHERE name = 'channels';";
    sqlx::query_scalar(query).fetch_one(conn).await
}
/// Load the playout configuration row belonging to one channel.
pub async fn select_configuration(
    conn: &Pool<Sqlite>,
    channel: i32,
) -> Result<Configuration, sqlx::Error> {
    sqlx::query_as("SELECT * FROM configurations WHERE channel_id = $1")
        .bind(channel)
        .fetch_one(conn)
        .await
}
/// Create the initial configuration row for a new channel; all other
/// columns fall back to their schema defaults.
pub async fn insert_configuration(
    conn: &Pool<Sqlite>,
    channel_id: i32,
    output_param: String,
) -> Result<SqliteQueryResult, sqlx::Error> {
    sqlx::query("INSERT INTO configurations (channel_id, output_param) VALUES($1, $2)")
        .bind(channel_id)
        .bind(output_param)
        .execute(conn)
        .await
}
/// Flatten a `PlayoutConfig` into the 48-column `configurations` row.
///
/// The bind order below must stay exactly aligned with the $2..$48
/// placeholders in the statement — do not reorder. List-typed fields
/// (`ignore_lines`, `extensions`) are stored as `;`-joined strings, enums
/// via their string form.
pub async fn update_configuration(
    conn: &Pool<Sqlite>,
    id: i32,
    config: PlayoutConfig,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = "UPDATE configurations SET general_stop_threshold = $2, mail_subject = $3, mail_recipient = $4, mail_level = $5, mail_interval = $6, logging_ffmpeg_level = $7, logging_ingest_level = $8, logging_detect_silence = $9, logging_ignore = $10, processing_mode = $11, processing_audio_only = $12, processing_copy_audio = $13, processing_copy_video = $14, processing_width = $15, processing_height = $16, processing_aspect = $17, processing_fps = $18, processing_add_logo = $19, processing_logo = $20, processing_logo_scale = $21, processing_logo_opacity = $22, processing_logo_position = $23, processing_audio_tracks = $24, processing_audio_track_index = $25, processing_audio_channels = $26, processing_volume = $27, processing_filter = $28, processing_vtt_enable = $29, processing_vtt_dummy = $30, ingest_enable = $31, ingest_param = $32, ingest_filter = $33, playlist_day_start = $34, playlist_length = $35, playlist_infinit = $36, storage_filler = $37, storage_extensions = $38, storage_shuffle = $39, text_add = $40, text_from_filename = $41, text_font = $42, text_style = $43, text_regex = $44, task_enable = $45, task_path = $46, output_mode = $47, output_param = $48 WHERE id = $1";
    sqlx::query(query)
        .bind(id)
        .bind(config.general.stop_threshold)
        .bind(config.mail.subject)
        .bind(config.mail.recipient)
        .bind(config.mail.mail_level.as_str())
        .bind(config.mail.interval)
        .bind(config.logging.ffmpeg_level)
        .bind(config.logging.ingest_level)
        .bind(config.logging.detect_silence)
        .bind(config.logging.ignore_lines.join(";"))
        .bind(config.processing.mode.to_string())
        .bind(config.processing.audio_only)
        .bind(config.processing.copy_audio)
        .bind(config.processing.copy_video)
        .bind(config.processing.width)
        .bind(config.processing.height)
        .bind(config.processing.aspect)
        .bind(config.processing.fps)
        .bind(config.processing.add_logo)
        .bind(config.processing.logo)
        .bind(config.processing.logo_scale)
        .bind(config.processing.logo_opacity)
        .bind(config.processing.logo_position)
        .bind(config.processing.audio_tracks)
        .bind(config.processing.audio_track_index)
        .bind(config.processing.audio_channels)
        .bind(config.processing.volume)
        .bind(config.processing.custom_filter)
        .bind(config.processing.vtt_enable)
        .bind(config.processing.vtt_dummy)
        .bind(config.ingest.enable)
        .bind(config.ingest.input_param)
        .bind(config.ingest.custom_filter)
        .bind(config.playlist.day_start)
        .bind(config.playlist.length)
        .bind(config.playlist.infinit)
        .bind(config.storage.filler)
        .bind(config.storage.extensions.join(";"))
        .bind(config.storage.shuffle)
        .bind(config.text.add_text)
        .bind(config.text.text_from_filename)
        .bind(config.text.font)
        .bind(config.text.style)
        .bind(config.text.regex)
        .bind(config.task.enable)
        .bind(config.task.path.to_string_lossy().to_string())
        .bind(config.output.mode.to_string())
        .bind(config.output.output_param)
        .execute(conn)
        .await
}
/// Create an (all-defaults) advanced configuration row for a new channel.
pub async fn insert_advanced_configuration(
    conn: &Pool<Sqlite>,
    channel_id: i32,
) -> Result<SqliteQueryResult, sqlx::Error> {
    sqlx::query("INSERT INTO advanced_configurations (channel_id) VALUES($1)")
        .bind(channel_id)
        .execute(conn)
        .await
}
/// Flatten an `AdvancedConfig` (custom ffmpeg parameters and filter
/// overrides) into the `advanced_configurations` row of one channel.
///
/// Bind order must stay aligned with the $2..$27 placeholders.
pub async fn update_advanced_configuration(
    conn: &Pool<Sqlite>,
    channel_id: i32,
    config: AdvancedConfig,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = "UPDATE advanced_configurations SET decoder_input_param = $2, decoder_output_param = $3, encoder_input_param = $4, ingest_input_param = $5, filter_deinterlace = $6, filter_pad_scale_w = $7, filter_pad_scale_h = $8, filter_pad_video = $9, filter_fps = $10, filter_scale = $11, filter_set_dar = $12, filter_fade_in = $13, filter_fade_out = $14, filter_overlay_logo_scale = $15, filter_overlay_logo_fade_in = $16, filter_overlay_logo_fade_out = $17, filter_overlay_logo = $18, filter_tpad = $19, filter_drawtext_from_file = $20, filter_drawtext_from_zmq = $21, filter_aevalsrc = $22, filter_afade_in = $23, filter_afade_out = $24, filter_apad = $25, filter_volume = $26, filter_split = $27 WHERE channel_id = $1";
    sqlx::query(query)
        .bind(channel_id)
        .bind(config.decoder.input_param)
        .bind(config.decoder.output_param)
        .bind(config.encoder.input_param)
        .bind(config.ingest.input_param)
        .bind(config.filter.deinterlace)
        .bind(config.filter.pad_scale_w)
        .bind(config.filter.pad_scale_h)
        .bind(config.filter.pad_video)
        .bind(config.filter.fps)
        .bind(config.filter.scale)
        .bind(config.filter.set_dar)
        .bind(config.filter.fade_in)
        .bind(config.filter.fade_out)
        .bind(config.filter.overlay_logo_scale)
        .bind(config.filter.overlay_logo_fade_in)
        .bind(config.filter.overlay_logo_fade_out)
        .bind(config.filter.overlay_logo)
        .bind(config.filter.tpad)
        .bind(config.filter.drawtext_from_file)
        .bind(config.filter.drawtext_from_zmq)
        .bind(config.filter.aevalsrc)
        .bind(config.filter.afade_in)
        .bind(config.filter.afade_out)
        .bind(config.filter.apad)
        .bind(config.filter.volume)
        .bind(config.filter.split)
        .execute(conn)
        .await
}
/// Load the advanced configuration row belonging to one channel.
pub async fn select_advanced_configuration(
    conn: &Pool<Sqlite>,
    channel: i32,
) -> Result<AdvancedConfiguration, sqlx::Error> {
    sqlx::query_as("SELECT * FROM advanced_configurations WHERE channel_id = $1")
        .bind(channel)
        .fetch_one(conn)
        .await
}
/// Resolve a role id to its `Role` (by name).
pub async fn select_role(conn: &Pool<Sqlite>, id: &i32) -> Result<Role, sqlx::Error> {
    sqlx::query_as("SELECT name FROM roles WHERE id = $1")
        .bind(id)
        .fetch_one(conn)
        .await
}
/// Fetch a user by username for login verification.
///
/// Unlike `select_user`, this includes the password hash; `channel_ids`
/// comes back as a comma-joined `group_concat` over the join table.
pub async fn select_login(conn: &Pool<Sqlite>, user: &str) -> Result<User, sqlx::Error> {
    let query =
        "SELECT u.id, u.mail, u.username, u.password, u.role_id, group_concat(uc.channel_id, ',') as channel_ids FROM user u
        left join user_channels uc on uc.user_id = u.id
        WHERE u.username = $1";
    sqlx::query_as(query).bind(user).fetch_one(conn).await
}
/// Fetch a user by id (without the password hash), including the
/// comma-joined list of linked channel ids.
pub async fn select_user(conn: &Pool<Sqlite>, id: i32) -> Result<User, sqlx::Error> {
    sqlx::query_as(
        "SELECT u.id, u.mail, u.username, u.role_id, group_concat(uc.channel_id, ',') as channel_ids FROM user u
        left join user_channels uc on uc.user_id = u.id
        WHERE u.id = $1",
    )
    .bind(id)
    .fetch_one(conn)
    .await
}
/// List every user with the global-admin role (role_id = 1), without
/// password hashes.
pub async fn select_global_admins(conn: &Pool<Sqlite>) -> Result<Vec<User>, sqlx::Error> {
    let query = "SELECT u.id, u.mail, u.username, u.role_id, group_concat(uc.channel_id, ',') as channel_ids FROM user u
    left join user_channels uc on uc.user_id = u.id
    WHERE u.role_id = 1";
    sqlx::query_as(query).fetch_all(conn).await
}
/// List all users with only id and username populated.
pub async fn select_users(conn: &Pool<Sqlite>) -> Result<Vec<User>, sqlx::Error> {
    sqlx::query_as("SELECT id, username FROM user")
        .fetch_all(conn)
        .await
}
/// Create a new user with an Argon2-hashed password and link the optional
/// channel ids through `user_channels`.
pub async fn insert_user(conn: &Pool<Sqlite>, user: User) -> Result<(), sqlx::Error> {
    // Argon2 hashing is CPU-heavy, so run it off the async executor.
    // The move closure only captures `user.password` (Rust 2021 disjoint
    // field capture), leaving the other fields usable below.
    let password_hash = task::spawn_blocking(move || {
        let salt = SaltString::generate(&mut OsRng);
        let hash = Argon2::default()
            .hash_password(user.password.clone().as_bytes(), &salt)
            .unwrap();
        hash.to_string()
    })
    .await
    .unwrap();
    let query =
        "INSERT INTO user (mail, username, password, role_id) VALUES($1, $2, $3, $4) RETURNING id";
    let user_id: i32 = sqlx::query(query)
        .bind(user.mail)
        .bind(user.username)
        .bind(password_hash)
        .bind(user.role_id)
        .fetch_one(conn)
        .await?
        .get("id");
    if let Some(channel_ids) = user.channel_ids {
        insert_user_channel(conn, user_id, channel_ids).await?;
    }
    Ok(())
}
/// Upsert a user by username: on conflict the existing row's mail,
/// password hash and role are overwritten. Channel links are added
/// (INSERT OR IGNORE) but never removed here.
pub async fn insert_or_update_user(conn: &Pool<Sqlite>, user: User) -> Result<(), sqlx::Error> {
    // Hash off the async executor; see `insert_user` for capture details.
    let password_hash = task::spawn_blocking(move || {
        let salt = SaltString::generate(&mut OsRng);
        let hash = Argon2::default()
            .hash_password(user.password.clone().as_bytes(), &salt)
            .unwrap();
        hash.to_string()
    })
    .await
    .unwrap();
    let query = "INSERT INTO user (mail, username, password, role_id) VALUES($1, $2, $3, $4)
    ON CONFLICT(username) DO UPDATE SET
    mail = excluded.mail, username = excluded.username, password = excluded.password, role_id = excluded.role_id
    RETURNING id";
    let user_id: i32 = sqlx::query(query)
        .bind(user.mail)
        .bind(user.username)
        .bind(password_hash)
        .bind(user.role_id)
        .fetch_one(conn)
        .await?
        .get("id");
    if let Some(channel_ids) = user.channel_ids {
        insert_user_channel(conn, user_id, channel_ids).await?;
    }
    Ok(())
}
/// Update arbitrary user columns from a pre-built `SET` fragment.
///
/// SECURITY NOTE(review): `fields` is interpolated directly into the SQL
/// statement — it must never contain untrusted input. Callers are
/// responsible for building it from fixed column names and bound/escaped
/// values only; a parameterized variant would be safer.
pub async fn update_user(
    conn: &Pool<Sqlite>,
    id: i32,
    fields: String,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = format!("UPDATE user SET {fields} WHERE id = $1");
    sqlx::query(&query).bind(id).execute(conn).await
}
/// Link a user to each of the given channels; existing links are kept
/// untouched (INSERT OR IGNORE).
pub async fn insert_user_channel(
    conn: &Pool<Sqlite>,
    user_id: i32,
    channel_ids: Vec<i32>,
) -> Result<(), sqlx::Error> {
    const QUERY: &str = "INSERT OR IGNORE INTO user_channels (channel_id, user_id) VALUES ($1, $2);";

    for channel_id in &channel_ids {
        sqlx::query(QUERY)
            .bind(channel_id)
            .bind(user_id)
            .execute(conn)
            .await?;
    }

    Ok(())
}
/// Delete a user row by id.
pub async fn delete_user(conn: &Pool<Sqlite>, id: i32) -> Result<SqliteQueryResult, sqlx::Error> {
    sqlx::query("DELETE FROM user WHERE id = $1;")
        .bind(id)
        .execute(conn)
        .await
}
/// List all drawtext presets belonging to one channel.
pub async fn select_presets(conn: &Pool<Sqlite>, id: i32) -> Result<Vec<TextPreset>, sqlx::Error> {
    sqlx::query_as("SELECT * FROM presets WHERE channel_id = $1")
        .bind(id)
        .fetch_all(conn)
        .await
}
/// Update a text preset. Note the unusual placeholder layout: the preset
/// fields are $1..$11 and the row id is $12, so `id` is bound LAST —
/// keep the bind order aligned with the statement.
pub async fn update_preset(
    conn: &Pool<Sqlite>,
    id: &i32,
    preset: TextPreset,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query =
        "UPDATE presets SET name = $1, text = $2, x = $3, y = $4, fontsize = $5, line_spacing = $6,
        fontcolor = $7, alpha = $8, box = $9, boxcolor = $10, boxborderw = $11 WHERE id = $12";
    sqlx::query(query)
        .bind(preset.name)
        .bind(preset.text)
        .bind(preset.x)
        .bind(preset.y)
        .bind(preset.fontsize)
        .bind(preset.line_spacing)
        .bind(preset.fontcolor)
        .bind(preset.alpha)
        // `box` is a Rust keyword, hence the raw identifier on the struct.
        .bind(preset.r#box)
        .bind(preset.boxcolor)
        .bind(preset.boxborderw)
        .bind(id)
        .execute(conn)
        .await
}
/// Insert a new drawtext preset for the channel given inside `preset`.
pub async fn insert_preset(
    conn: &Pool<Sqlite>,
    preset: TextPreset,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query =
        "INSERT INTO presets (channel_id, name, text, x, y, fontsize, line_spacing, fontcolor, alpha, box, boxcolor, boxborderw)
        VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)";
    sqlx::query(query)
        .bind(preset.channel_id)
        .bind(preset.name)
        .bind(preset.text)
        .bind(preset.x)
        .bind(preset.y)
        .bind(preset.fontsize)
        .bind(preset.line_spacing)
        .bind(preset.fontcolor)
        .bind(preset.alpha)
        // `box` is a Rust keyword, hence the raw identifier on the struct.
        .bind(preset.r#box)
        .bind(preset.boxcolor)
        .bind(preset.boxborderw)
        .execute(conn)
        .await
}
/// Seed a freshly created channel with the four stock text presets
/// (default welcome text, empty text, fading bottom banner, scroller).
/// The alpha/x expressions are ffmpeg drawtext runtime expressions and
/// must be stored verbatim.
pub async fn new_channel_presets(
    conn: &Pool<Sqlite>,
    channel_id: i32,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = "INSERT INTO presets (name, text, x, y, fontsize, line_spacing, fontcolor, box, boxcolor, boxborderw, alpha, channel_id)
    VALUES ('Default', 'Welcome to ffplayout messenger!', '(w-text_w)/2', '(h-text_h)/2', '24', '4', '#ffffff@0xff', '0', '#000000@0x80', '4', '1.0', $1),
    ('Empty Text', '', '0', '0', '24', '4', '#000000', '0', '#000000', '0', '0', $1),
    ('Bottom Text fade in', 'The upcoming event will be delayed by a few minutes.', '(w-text_w)/2', '(h-line_h)*0.9', '24', '4', '#ffffff', '1', '#000000@0x80', '4', 'ifnot(ld(1),st(1,t));if(lt(t,ld(1)+1),0,if(lt(t,ld(1)+2),(t-(ld(1)+1))/1,if(lt(t,ld(1)+8),1,if(lt(t,ld(1)+9),(1-(t-(ld(1)+8)))/1,0))))', $1),
    ('Scrolling Text', 'We have a very important announcement to make.', 'ifnot(ld(1),st(1,t));if(lt(t,ld(1)+1),w+4,w-w/12*mod(t-ld(1),12*(w+tw)/w))', '(h-line_h)*0.9', '24', '4', '#ffffff', '1', '#000000@0x80', '4', '1.0', $1);";
    sqlx::query(query).bind(channel_id).execute(conn).await
}
/// Delete a text preset row by id.
pub async fn delete_preset(
    conn: &Pool<Sqlite>,
    id: &i32,
) -> Result<SqliteQueryResult, sqlx::Error> {
    sqlx::query("DELETE FROM presets WHERE id = $1;")
        .bind(id)
        .execute(conn)
        .await
}

View File

@ -1,40 +0,0 @@
use std::io::{stdin, stdout, Write};
use sqlx::{migrate::MigrateDatabase, Pool, Sqlite, SqlitePool};
pub mod handles;
pub mod models;
use crate::utils::db_path;
/// Open (and on first run create) the SQLite database and return a
/// connection pool.
///
/// # Panics
/// Panics when the database path cannot be determined or the file cannot
/// be created.
pub async fn db_pool() -> Result<Pool<Sqlite>, sqlx::Error> {
    let path = db_path().unwrap();

    // Create the database file on first run.
    if !Sqlite::database_exists(path).await.unwrap_or(false) {
        Sqlite::create_database(path).await.unwrap();
    }

    SqlitePool::connect(path).await
}
/// Interactively drop the database: prompt on stdout, read one line from
/// stdin, and delete the DB only when the answer starts with 'y'/'Y'.
pub async fn db_drop() {
    print!("Drop Database [Y/n]: ");
    stdout().flush().unwrap();

    let mut drop_answer = String::new();
    stdin()
        .read_line(&mut drop_answer)
        .expect("Did not enter a yes or no?");

    if drop_answer.trim().to_lowercase().starts_with('y') {
        match Sqlite::drop_database(db_path().unwrap()).await {
            Ok(_) => println!("Successfully dropped DB"),
            Err(e) => eprintln!("{e}"),
        }
    }
}

View File

@ -1,434 +0,0 @@
use std::{error::Error, fmt, str::FromStr};
use once_cell::sync::OnceCell;
use regex::Regex;
use serde::{
de::{self, Visitor},
Deserialize, Serialize,
};
// use serde_with::{formats::CommaSeparator, serde_as, StringWithSeparator};
use sqlx::{sqlite::SqliteRow, FromRow, Pool, Row, Sqlite};
use crate::db::handles;
use crate::utils::config::PlayoutConfig;
/// The single, process-wide settings row from the `global` table:
/// storage/playlist/log paths, the JWT signing secret and SMTP mail
/// credentials.
#[derive(Clone, Default, Debug, Deserialize, Serialize, sqlx::FromRow)]
pub struct GlobalSettings {
    pub id: i32,
    // Random secret generated at first start; used for JWT signing.
    pub secret: Option<String>,
    pub logs: String,
    pub playlists: String,
    pub public: String,
    pub storage: String,
    // True when running inside a container (paths shared across channels).
    pub shared: bool,
    pub mail_smtp: String,
    pub mail_user: String,
    pub mail_password: String,
    pub mail_starttls: bool,
}
impl GlobalSettings {
    /// Load the global settings row from the database.
    ///
    /// Falls back to `GlobalSettings::default()` when the query fails,
    /// e.g. on a not yet initialized database.
    pub async fn new(conn: &Pool<Sqlite>) -> Self {
        // The derived Default produces exactly the previous hand-written
        // fallback (id 0, no secret, empty strings, false flags).
        handles::select_global(conn).await.unwrap_or_default()
    }

    /// Access the process wide settings singleton.
    ///
    /// # Panics
    /// Panics when `init_globales` has not been called yet.
    pub fn global() -> &'static GlobalSettings {
        INSTANCE.get().expect("Config is not initialized")
    }
}
// Process wide storage for the global settings, set once at startup.
static INSTANCE: OnceCell<GlobalSettings> = OnceCell::new();

/// Load the global settings from the DB and store them in the singleton.
///
/// # Panics
/// Panics when called a second time (the cell is already set).
pub async fn init_globales(conn: &Pool<Sqlite>) {
    let config = GlobalSettings::new(conn).await;

    INSTANCE.set(config).unwrap();
}
/// A playout channel as stored in the database.
#[derive(Clone, Debug, Default, Deserialize, Serialize, sqlx::FromRow)]
pub struct Channel {
    // Ignored on deserialize; the database assigns the real id.
    #[serde(default = "default_id", skip_deserializing)]
    pub id: i32,
    pub name: String,
    pub preview_url: String,
    pub extra_extensions: String,
    pub active: bool,
    // Per-channel directories (public/HLS, playlists, media storage).
    pub public: String,
    pub playlists: String,
    pub storage: String,
    pub last_date: Option<String>,
    pub time_shift: f64,
    // Not a DB column (sqlx default) and optional in incoming JSON.
    #[sqlx(default)]
    #[serde(default)]
    pub utc_offset: i32,
}
/// Serde fallback for `Channel::id` when no id is given.
fn default_id() -> i32 {
    1
}
/// An API user account.
// #[serde_as]
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct User {
    #[serde(skip_deserializing)]
    pub id: i32,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub mail: Option<String>,
    pub username: String,
    // Never serialized back to clients; defaults to "" when missing on input.
    #[serde(skip_serializing, default = "empty_string")]
    pub password: String,
    pub role_id: Option<i32>,
    // Stored in the DB as a comma separated string (see the FromRow impl).
    // #[serde_as(as = "StringWithSeparator::<CommaSeparator, i32>")]
    pub channel_ids: Option<Vec<i32>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub token: Option<String>,
}
impl FromRow<'_, SqliteRow> for User {
    /// Map a DB row to a `User`; missing columns fall back to defaults.
    fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
        // channel_ids is stored as a comma separated string. Skip empty
        // segments so an empty column yields an empty list instead of the
        // phantom channel id 0 ("".split(',') produces [""], which parsed
        // with unwrap_or_default() used to become [0]).
        let channel_ids = row
            .try_get::<String, &str>("channel_ids")
            .unwrap_or_default()
            .split(',')
            .filter(|s| !s.is_empty())
            .map(|i| i.parse::<i32>().unwrap_or_default())
            .collect();

        Ok(Self {
            id: row.try_get("id").unwrap_or_default(),
            mail: row.try_get("mail").unwrap_or_default(),
            username: row.try_get("username").unwrap_or_default(),
            password: row.try_get("password").unwrap_or_default(),
            role_id: row.try_get("role_id").unwrap_or_default(),
            channel_ids: Some(channel_ids),
            token: None,
        })
    }
}
/// Serde default for `User::password`: an empty string.
fn empty_string() -> String {
    // String::new() avoids the pointless allocation path of "".to_string().
    String::new()
}
/// Minimal user identity attached to each authenticated request.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct UserMeta {
    pub id: i32,
    // Channel ids taken from the JWT claims.
    pub channels: Vec<i32>,
}
impl UserMeta {
    /// Bundle a user id with its channel ids.
    pub fn new(id: i32, channels: Vec<i32>) -> Self {
        Self { channels, id }
    }
}
/// User access roles; `Guest` is the fallback for unknown role names.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub enum Role {
    GlobalAdmin,
    ChannelAdmin,
    User,
    Guest,
}
impl Role {
    /// Map a role name to its enum variant; unknown names become `Guest`.
    pub fn set_role(role: &str) -> Self {
        // Delegate to the FromStr impl so the name -> variant mapping
        // exists in exactly one place. Parsing is infallible (unknown
        // input already maps to Guest), so unwrap_or never triggers.
        role.parse().unwrap_or(Role::Guest)
    }
}
impl FromStr for Role {
    type Err = String;

    /// Parse a role name; anything unrecognized becomes `Guest`.
    fn from_str(input: &str) -> Result<Self, Self::Err> {
        let role = match input {
            "global_admin" => Self::GlobalAdmin,
            "channel_admin" => Self::ChannelAdmin,
            "user" => Self::User,
            _ => Self::Guest,
        };

        Ok(role)
    }
}
impl fmt::Display for Role {
    /// Render the role in its database/string form.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match self {
            Self::GlobalAdmin => "global_admin",
            Self::ChannelAdmin => "channel_admin",
            Self::User => "user",
            Self::Guest => "guest",
        };

        f.write_str(name)
    }
}
// Let sqlx decode a TEXT column directly into a `Role`.
impl<'r> sqlx::decode::Decode<'r, ::sqlx::Sqlite> for Role
where
    &'r str: sqlx::decode::Decode<'r, sqlx::Sqlite>,
{
    fn decode(
        value: sqlx::sqlite::SqliteValueRef<'r>,
    ) -> Result<Role, Box<dyn Error + 'static + Send + Sync>> {
        // Decode as &str first, then parse; parsing never fails because
        // unknown names map to Role::Guest.
        let value = <&str as sqlx::decode::Decode<sqlx::Sqlite>>::decode(value)?;

        Ok(value.parse()?)
    }
}
impl FromRow<'_, SqliteRow> for Role {
    /// Map the `name` column of a roles row to a `Role` variant.
    fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
        // Reuse the single name -> variant mapping (Guest fallback)
        // instead of duplicating the match here.
        Ok(Self::set_role(row.get("name")))
    }
}
/// Stored text overlay preset; field names mirror ffmpeg drawtext options.
#[derive(Debug, Deserialize, Serialize, Clone, sqlx::FromRow)]
pub struct TextPreset {
    #[sqlx(default)]
    #[serde(skip_deserializing)]
    pub id: i32,
    pub channel_id: i32,
    pub name: String,
    pub text: String,
    // Position/size values are strings because they may hold ffmpeg
    // expressions, not just plain numbers.
    pub x: String,
    pub y: String,
    #[serde(deserialize_with = "deserialize_number_or_string")]
    pub fontsize: String,
    #[serde(deserialize_with = "deserialize_number_or_string")]
    pub line_spacing: String,
    pub fontcolor: String,
    pub r#box: String,
    pub boxcolor: String,
    #[serde(deserialize_with = "deserialize_number_or_string")]
    pub boxborderw: String,
    #[serde(deserialize_with = "deserialize_number_or_string")]
    pub alpha: String,
}
/// Deserialize number or string
pub fn deserialize_number_or_string<'de, D>(deserializer: D) -> Result<String, D::Error>
where
D: serde::Deserializer<'de>,
{
struct StringOrNumberVisitor;
impl<'de> Visitor<'de> for StringOrNumberVisitor {
type Value = String;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a string or a number")
}
fn visit_str<E: de::Error>(self, value: &str) -> Result<Self::Value, E> {
let re = Regex::new(r"0,([0-9]+)").unwrap();
let clean_string = re.replace_all(value, "0.$1").to_string();
Ok(clean_string)
}
fn visit_u64<E: de::Error>(self, value: u64) -> Result<Self::Value, E> {
Ok(value.to_string())
}
fn visit_i64<E: de::Error>(self, value: i64) -> Result<Self::Value, E> {
Ok(value.to_string())
}
fn visit_f64<E: de::Error>(self, value: f64) -> Result<Self::Value, E> {
Ok(value.to_string())
}
}
deserializer.deserialize_any(StringOrNumberVisitor)
}
/// Flat, per-channel playout configuration as stored in the database.
///
/// Field name prefixes mirror the sections of `PlayoutConfig`; see
/// `Configuration::from` for the exact mapping.
#[derive(Clone, Debug, Deserialize, Serialize, sqlx::FromRow)]
pub struct Configuration {
    pub id: i32,
    pub channel_id: i32,
    pub general_stop_threshold: f64,

    // mail notifications
    pub mail_subject: String,
    pub mail_recipient: String,
    pub mail_level: String,
    pub mail_interval: i64,

    // logging
    pub logging_ffmpeg_level: String,
    pub logging_ingest_level: String,
    pub logging_detect_silence: bool,
    // semicolon separated list (joined in `Configuration::from`)
    #[serde(default)]
    pub logging_ignore: String,

    // processing / ffmpeg options
    pub processing_mode: String,
    pub processing_audio_only: bool,
    pub processing_copy_audio: bool,
    pub processing_copy_video: bool,
    pub processing_width: i64,
    pub processing_height: i64,
    pub processing_aspect: f64,
    pub processing_fps: f64,
    pub processing_add_logo: bool,
    pub processing_logo: String,
    pub processing_logo_scale: String,
    pub processing_logo_opacity: f64,
    pub processing_logo_position: String,
    #[serde(default = "default_tracks")]
    pub processing_audio_tracks: i32,
    #[serde(default = "default_track_index")]
    pub processing_audio_track_index: i32,
    #[serde(default = "default_channels")]
    pub processing_audio_channels: u8,
    pub processing_volume: f64,
    #[serde(default)]
    pub processing_filter: String,
    #[serde(default)]
    pub processing_vtt_enable: bool,
    #[serde(default)]
    pub processing_vtt_dummy: Option<String>,

    // live ingest
    pub ingest_enable: bool,
    pub ingest_param: String,
    #[serde(default)]
    pub ingest_filter: String,

    // playlist behaviour
    pub playlist_day_start: String,
    pub playlist_length: String,
    pub playlist_infinit: bool,

    // storage / filler
    pub storage_filler: String,
    // semicolon separated list (joined in `Configuration::from`)
    pub storage_extensions: String,
    pub storage_shuffle: bool,

    // text overlay
    pub text_add: bool,
    pub text_from_filename: bool,
    pub text_font: String,
    pub text_style: String,
    pub text_regex: String,

    // custom task hook
    pub task_enable: bool,
    pub task_path: String,

    // output
    pub output_mode: String,
    pub output_param: String,
}
impl Configuration {
    /// Flatten a nested `PlayoutConfig` into this DB row representation.
    ///
    /// List valued settings (`ignore_lines`, `extensions`) are joined with
    /// ';'; enum-like settings are stored via their `Display` form.
    pub fn from(id: i32, channel_id: i32, config: PlayoutConfig) -> Self {
        Self {
            id,
            channel_id,
            general_stop_threshold: config.general.stop_threshold,
            mail_subject: config.mail.subject,
            mail_recipient: config.mail.recipient,
            mail_level: config.mail.mail_level.to_string(),
            mail_interval: config.mail.interval,
            logging_ffmpeg_level: config.logging.ffmpeg_level,
            logging_ingest_level: config.logging.ingest_level,
            logging_detect_silence: config.logging.detect_silence,
            logging_ignore: config.logging.ignore_lines.join(";"),
            processing_mode: config.processing.mode.to_string(),
            processing_audio_only: config.processing.audio_only,
            processing_audio_track_index: config.processing.audio_track_index,
            processing_copy_audio: config.processing.copy_audio,
            processing_copy_video: config.processing.copy_video,
            processing_width: config.processing.width,
            processing_height: config.processing.height,
            processing_aspect: config.processing.aspect,
            processing_fps: config.processing.fps,
            processing_add_logo: config.processing.add_logo,
            processing_logo: config.processing.logo,
            processing_logo_scale: config.processing.logo_scale,
            processing_logo_opacity: config.processing.logo_opacity,
            processing_logo_position: config.processing.logo_position,
            processing_audio_tracks: config.processing.audio_tracks,
            processing_audio_channels: config.processing.audio_channels,
            processing_volume: config.processing.volume,
            processing_filter: config.processing.custom_filter,
            processing_vtt_enable: config.processing.vtt_enable,
            processing_vtt_dummy: config.processing.vtt_dummy,
            ingest_enable: config.ingest.enable,
            ingest_param: config.ingest.input_param,
            ingest_filter: config.ingest.custom_filter,
            playlist_day_start: config.playlist.day_start,
            playlist_length: config.playlist.length,
            playlist_infinit: config.playlist.infinit,
            storage_filler: config.storage.filler,
            storage_extensions: config.storage.extensions.join(";"),
            storage_shuffle: config.storage.shuffle,
            text_add: config.text.add_text,
            text_font: config.text.font,
            text_from_filename: config.text.text_from_filename,
            text_style: config.text.style,
            text_regex: config.text.regex,
            task_enable: config.task.enable,
            task_path: config.task.path.to_string_lossy().to_string(),
            output_mode: config.output.mode.to_string(),
            output_param: config.output.output_param,
        }
    }
}
/// Serde fallback for `processing_audio_track_index`; -1 selects no
/// specific track.
fn default_track_index() -> i32 {
    -1
}
/// Serde fallback for `processing_audio_tracks`: a single audio track.
fn default_tracks() -> i32 {
    1
}
/// Serde fallback for `processing_audio_channels`: stereo.
fn default_channels() -> u8 {
    2
}
/// Optional expert overrides for single ffmpeg parameters and filters.
///
/// All fields are nullable in the database, hence the `Option` types.
#[derive(Clone, Debug, Deserialize, Serialize, sqlx::FromRow)]
pub struct AdvancedConfiguration {
    pub id: i32,
    pub channel_id: i32,
    // Raw command line fragments for the decoder/encoder/ingest processes.
    pub decoder_input_param: Option<String>,
    pub decoder_output_param: Option<String>,
    pub encoder_input_param: Option<String>,
    pub ingest_input_param: Option<String>,
    // Overrides for individual video filter stages.
    pub filter_deinterlace: Option<String>,
    pub filter_pad_scale_w: Option<String>,
    pub filter_pad_scale_h: Option<String>,
    pub filter_pad_video: Option<String>,
    pub filter_fps: Option<String>,
    pub filter_scale: Option<String>,
    pub filter_set_dar: Option<String>,
    pub filter_fade_in: Option<String>,
    pub filter_fade_out: Option<String>,
    pub filter_overlay_logo_scale: Option<String>,
    pub filter_overlay_logo_fade_in: Option<String>,
    pub filter_overlay_logo_fade_out: Option<String>,
    pub filter_overlay_logo: Option<String>,
    pub filter_tpad: Option<String>,
    pub filter_drawtext_from_file: Option<String>,
    pub filter_drawtext_from_zmq: Option<String>,
    // Overrides for individual audio filter stages.
    pub filter_aevalsrc: Option<String>,
    pub filter_afade_in: Option<String>,
    pub filter_afade_out: Option<String>,
    pub filter_apad: Option<String>,
    pub filter_volume: Option<String>,
    pub filter_split: Option<String>,
}

View File

@ -1,47 +0,0 @@
use std::sync::{Arc, Mutex};
use actix_web::{dev::ServiceRequest, Error, HttpMessage};
use actix_web_grants::authorities::AttachAuthorities;
use actix_web_httpauth::extractors::bearer::BearerAuth;
use clap::Parser;
use lazy_static::lazy_static;
use sysinfo::{Disks, Networks, System};
pub mod api;
pub mod db;
pub mod macros;
pub mod player;
pub mod sse;
pub mod utils;
use api::auth;
use db::models::UserMeta;
use utils::advanced_config::AdvancedConfig;
use utils::args_parse::Args;
// Process wide globals: parsed CLI arguments and shared sysinfo handles.
lazy_static! {
    // Command line arguments, parsed once on first access.
    pub static ref ARGS: Args = Args::parse();
    // Cached disk list for system statistics.
    pub static ref DISKS: Arc<Mutex<Disks>> =
        Arc::new(Mutex::new(Disks::new_with_refreshed_list()));
    // Cached network interface list.
    pub static ref NETWORKS: Arc<Mutex<Networks>> =
        Arc::new(Mutex::new(Networks::new_with_refreshed_list()));
    // System information handle (CPU, memory, ...).
    pub static ref SYS: Arc<Mutex<System>> = Arc::new(Mutex::new(System::new_all()));
}
/// Bearer token middleware: decode the JWT and attach role + user meta
/// to the request, or reject it with the decode error.
pub async fn validator(
    req: ServiceRequest,
    credentials: BearerAuth,
) -> Result<ServiceRequest, (Error, ServiceRequest)> {
    // We just get permissions from JWT
    let claims = match auth::decode_jwt(credentials.token()).await {
        Ok(claims) => claims,
        Err(e) => return Err((e, req)),
    };

    req.attach(vec![claims.role]);
    req.extensions_mut()
        .insert(UserMeta::new(claims.id, claims.channels));

    Ok(req)
}

View File

@ -1,6 +0,0 @@
/// Build a `Vec<String>` from any list of `to_string()`-able expressions,
/// e.g. `vec_strings!["-i", input]`.
#[macro_export]
macro_rules! vec_strings {
    ($($str:expr),*) => ({
        vec![$($str.to_string(),)*] as Vec<String>
    });
}

View File

@ -1,286 +0,0 @@
use std::{
collections::HashSet,
fs::File,
io,
process::exit,
sync::{atomic::AtomicBool, Arc, Mutex},
thread,
};
use actix_web::{middleware::Logger, web, App, HttpServer};
use actix_web_httpauth::middleware::HttpAuthentication;
#[cfg(any(debug_assertions, not(feature = "embed_frontend")))]
use actix_files::Files;
#[cfg(all(not(debug_assertions), feature = "embed_frontend"))]
use actix_web_static_files::ResourceFiles;
use log::*;
use ffplayout::{
api::routes::*,
db::{db_drop, db_pool, handles, models::init_globales},
player::{
controller::{ChannelController, ChannelManager},
utils::{get_date, is_remote, json_validate::validate_playlist, JsonPlaylist},
},
sse::{broadcast::Broadcaster, routes::*, SseAuthState},
utils::{
args_parse::run_args,
config::get_config,
logging::{init_logging, MailQueue},
playlist::generate_playlist,
},
validator, ARGS,
};
#[cfg(any(debug_assertions, not(feature = "embed_frontend")))]
use ffplayout::utils::public_path;
#[cfg(all(not(debug_assertions), feature = "embed_frontend"))]
include!(concat!(env!("OUT_DIR"), "/generated.rs"));
/// Worker count for the HTTP server: half the available cores,
/// but never fewer than two.
fn thread_counter() -> usize {
    let cores = thread::available_parallelism().map_or(1, |n| n.get());

    std::cmp::max(cores / 2, 2)
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // Shared list of per-channel mail queues, filled below and consumed by
    // the logging setup.
    let mail_queues = Arc::new(Mutex::new(vec![]));

    let pool = db_pool()
        .await
        .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;

    // One-shot CLI commands handled in run_args exit the process here.
    if let Err(c) = run_args(&pool).await {
        exit(c);
    }

    init_globales(&pool).await;
    init_logging(mail_queues.clone())?;

    let channel_controllers = Arc::new(Mutex::new(ChannelController::new()));

    if let Some(conn) = &ARGS.listen {
        // API server mode: build a manager per channel, start the active
        // ones, then serve the HTTP API on the given address.
        let channels = handles::select_related_channels(&pool, None)
            .await
            .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;

        for channel in channels.iter() {
            let config = get_config(&pool, channel.id)
                .await
                .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
            let manager = ChannelManager::new(Some(pool.clone()), channel.clone(), config.clone());
            let m_queue = Arc::new(Mutex::new(MailQueue::new(channel.id, config.mail)));

            channel_controllers
                .lock()
                .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?
                .add(manager.clone());

            if let Ok(mut mqs) = mail_queues.lock() {
                mqs.push(m_queue.clone());
            }

            if channel.active {
                manager.async_start().await;
            }
        }

        // Split the --listen value into address and port.
        let ip_port = conn.split(':').collect::<Vec<&str>>();
        let addr = ip_port[0];
        let port = ip_port
            .get(1)
            .and_then(|p| p.parse::<u16>().ok())
            .ok_or(io::Error::new(
                io::ErrorKind::InvalidInput,
                "<ADRESSE>:<PORT> needed! For example: 127.0.0.1:8787",
            ))?;
        let controllers = web::Data::from(channel_controllers.clone());
        let auth_state = web::Data::new(SseAuthState {
            uuids: tokio::sync::Mutex::new(HashSet::new()),
        });
        let broadcast_data = Broadcaster::create();
        let thread_count = thread_counter();

        info!("Running ffplayout API, listen on http://{conn}");

        let db_clone = pool.clone();

        // no 'allow origin' here, give it to the reverse proxy
        HttpServer::new(move || {
            let queues = mail_queues.clone();
            let auth = HttpAuthentication::bearer(validator);
            let db_pool = web::Data::new(db_clone.clone());
            // Customize logging format to get IP though proxies.
            let logger = Logger::new("%{r}a \"%r\" %s %b \"%{Referer}i\" \"%{User-Agent}i\" %T")
                .exclude_regex(r"/_nuxt/*");

            let mut web_app = App::new()
                .app_data(db_pool)
                .app_data(web::Data::from(queues))
                .app_data(controllers.clone())
                .app_data(auth_state.clone())
                .app_data(web::Data::from(Arc::clone(&broadcast_data)))
                .wrap(logger)
                .service(login)
                .service(
                    // Everything under /api requires a valid bearer token.
                    web::scope("/api")
                        .wrap(auth.clone())
                        .service(add_user)
                        .service(get_user)
                        .service(get_by_name)
                        .service(get_users)
                        .service(remove_user)
                        .service(get_advanced_config)
                        .service(update_advanced_config)
                        .service(get_playout_config)
                        .service(update_playout_config)
                        .service(add_preset)
                        .service(get_presets)
                        .service(update_preset)
                        .service(delete_preset)
                        .service(get_channel)
                        .service(get_all_channels)
                        .service(patch_channel)
                        .service(add_channel)
                        .service(remove_channel)
                        .service(update_user)
                        .service(send_text_message)
                        .service(control_playout)
                        .service(media_current)
                        .service(process_control)
                        .service(get_playlist)
                        .service(save_playlist)
                        .service(gen_playlist)
                        .service(del_playlist)
                        .service(get_log)
                        .service(file_browser)
                        .service(add_dir)
                        .service(move_rename)
                        .service(remove)
                        .service(save_file)
                        .service(import_playlist)
                        .service(get_program)
                        .service(get_system_stat)
                        .service(generate_uuid),
                )
                .service(
                    web::scope("/data")
                        .service(validate_uuid)
                        .service(event_stream),
                )
                .service(get_file)
                .service(get_public);

            #[cfg(all(not(debug_assertions), feature = "embed_frontend"))]
            {
                // in release mode embed frontend
                let generated = generate();
                web_app =
                    web_app.service(ResourceFiles::new("/", generated).resolve_not_found_to_root());
            }

            #[cfg(any(debug_assertions, not(feature = "embed_frontend")))]
            {
                // in debug mode get frontend from path
                web_app = web_app.service(Files::new("/", public_path()).index_file("index.html"));
            }

            web_app
        })
        .bind((addr, port))?
        .workers(thread_count)
        .run()
        .await?;
    } else if ARGS.drop_db {
        db_drop().await;
    } else {
        // Direct playout mode: run the requested channels without the API.
        let channels = ARGS.channels.clone().unwrap_or_else(|| vec![1]);

        for (index, channel_id) in channels.iter().enumerate() {
            let config = match get_config(&pool, *channel_id).await {
                Ok(c) => c,
                Err(e) => {
                    eprint!("No config found, channel may not exists!\nOriginal error message: ");
                    return Err(io::Error::new(io::ErrorKind::Other, e.to_string()));
                }
            };
            let channel = handles::select_channel(&pool, channel_id)
                .await
                .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
            let manager = ChannelManager::new(Some(pool.clone()), channel.clone(), config.clone());

            if ARGS.foreground {
                if ARGS.channels.is_none() {
                    error!(
                        "Foreground mode needs at least 1 channel, run with `--channels (1 2 ...)`"
                    );
                    exit(1);
                }

                let m_queue = Arc::new(Mutex::new(MailQueue::new(*channel_id, config.mail)));

                channel_controllers
                    .lock()
                    .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?
                    .add(manager.clone());

                if let Ok(mut mqs) = mail_queues.lock() {
                    mqs.push(m_queue.clone());
                }

                manager.foreground_start(index).await;
            } else if ARGS.generate.is_some() {
                // run a simple playlist generator and save them to disk
                if let Err(e) = generate_playlist(manager) {
                    error!("{e}");
                    exit(1);
                };
            } else if ARGS.validate {
                // Resolve today's playlist path and run the validator on it.
                let mut playlist_path = config.channel.playlists.clone();
                let start_sec = config.playlist.start_sec.unwrap();
                let date = get_date(false, start_sec, false);

                if playlist_path.is_dir() || is_remote(&playlist_path.to_string_lossy()) {
                    let d: Vec<&str> = date.split('-').collect();

                    // Playlists live under <base>/<year>/<month>/<date>.json.
                    playlist_path = playlist_path
                        .join(d[0])
                        .join(d[1])
                        .join(date.clone())
                        .with_extension("json");
                }

                let f = File::options()
                    .read(true)
                    .write(false)
                    .open(&playlist_path)?;

                let playlist: JsonPlaylist = serde_json::from_reader(f)?;

                validate_playlist(
                    config,
                    Arc::new(Mutex::new(Vec::new())),
                    playlist,
                    Arc::new(AtomicBool::new(false)),
                );
            } else if !ARGS.init {
                error!("Run ffplayout with parameters! Run ffplayout -h for more information.");
            }
        }
    }

    // Shutdown: deactivate every channel and terminate its child processes.
    for channel_ctl in &channel_controllers.lock().unwrap().channels {
        channel_ctl.channel.lock().unwrap().active = false;
        channel_ctl.stop_all();
    }

    pool.close().await;

    Ok(())
}

View File

@ -1,431 +0,0 @@
use std::{
fmt, fs,
io::{self, Read},
path::Path,
process::Child,
sync::{
atomic::{AtomicBool, AtomicUsize, Ordering},
Arc, Mutex,
},
thread,
time::Duration,
};
use actix_web::web;
use log::*;
use m3u8_rs::Playlist;
use serde::{Deserialize, Serialize};
use sqlx::{Pool, Sqlite};
use walkdir::WalkDir;
use crate::player::{
output::{player, write_hls},
utils::{folder::fill_filler_list, Media},
};
use crate::utils::{
config::{OutputMode::*, PlayoutConfig},
errors::{ProcessError, ServiceError},
};
use crate::ARGS;
use crate::{
db::{handles, models::Channel},
utils::logging::Target,
};
// Crate version from Cargo.toml, shown in the channel start log line.
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// Defined process units.
///
/// Identifies one of the three child process slots a channel can run
/// (see the `decoder`/`encoder`/`ingest` fields of `ChannelManager`).
#[derive(Clone, Debug, Default, Copy, Eq, Serialize, Deserialize, PartialEq)]
pub enum ProcessUnit {
    #[default]
    Decoder,
    Encoder,
    Ingest,
}
impl fmt::Display for ProcessUnit {
    /// Human readable unit name, used in error messages.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match self {
            ProcessUnit::Decoder => "Decoder",
            ProcessUnit::Encoder => "Encoder",
            ProcessUnit::Ingest => "Ingest",
        };

        f.write_str(name)
    }
}
use ProcessUnit::*;
/// Shared runtime state for a single playout channel.
///
/// Every mutable member is wrapped in `Arc`, so clones of the manager
/// observe the same processes, flags and lists across threads.
#[derive(Clone, Debug, Default)]
pub struct ChannelManager {
    pub db_pool: Option<Pool<Sqlite>>,
    pub config: Arc<Mutex<PlayoutConfig>>,
    pub channel: Arc<Mutex<Channel>>,
    // Child process slots for the three process units.
    pub decoder: Arc<Mutex<Option<Child>>>,
    pub encoder: Arc<Mutex<Option<Child>>>,
    pub ingest: Arc<Mutex<Option<Child>>>,
    pub ingest_is_running: Arc<AtomicBool>,
    pub is_terminated: Arc<AtomicBool>,
    pub is_alive: Arc<AtomicBool>,
    pub is_processing: Arc<AtomicBool>,
    pub filter_chain: Option<Arc<Mutex<Vec<String>>>>,
    pub current_date: Arc<Mutex<String>>,
    // Set true on every (re)start of the channel; consumers reset it.
    pub list_init: Arc<AtomicBool>,
    pub current_media: Arc<Mutex<Option<Media>>>,
    pub current_list: Arc<Mutex<Vec<Media>>>,
    pub filler_list: Arc<Mutex<Vec<Media>>>,
    pub current_index: Arc<AtomicUsize>,
    pub filler_index: Arc<AtomicUsize>,
    // Incremented on start, decremented on stop/failure.
    pub run_count: Arc<AtomicUsize>,
}
impl ChannelManager {
    /// Create a manager for one channel; process slots stay empty and all
    /// state flags start in the "not running" position (via Default).
    pub fn new(db_pool: Option<Pool<Sqlite>>, channel: Channel, config: PlayoutConfig) -> Self {
        Self {
            db_pool,
            is_alive: Arc::new(AtomicBool::new(false)),
            channel: Arc::new(Mutex::new(channel)),
            config: Arc::new(Mutex::new(config)),
            list_init: Arc::new(AtomicBool::new(true)),
            current_media: Arc::new(Mutex::new(None)),
            current_list: Arc::new(Mutex::new(vec![Media::new(0, "", false)])),
            filler_list: Arc::new(Mutex::new(vec![])),
            current_index: Arc::new(AtomicUsize::new(0)),
            filler_index: Arc::new(AtomicUsize::new(0)),
            run_count: Arc::new(AtomicUsize::new(0)),
            ..Default::default()
        }
    }

    /// Copy the editable fields of `other` into the shared channel state.
    // NOTE(review): takes `self` by value; this works because the channel
    // state lives behind an Arc shared with every clone.
    pub fn update_channel(self, other: &Channel) {
        let mut channel = self.channel.lock().unwrap();

        channel.name.clone_from(&other.name);
        channel.preview_url.clone_from(&other.preview_url);
        channel.extra_extensions.clone_from(&other.extra_extensions);
        channel.active.clone_from(&other.active);
        channel.last_date.clone_from(&other.last_date);
        channel.time_shift.clone_from(&other.time_shift);
        channel.utc_offset.clone_from(&other.utc_offset);
    }

    /// Replace the whole playout configuration.
    pub fn update_config(&self, new_config: PlayoutConfig) {
        let mut config = self.config.lock().unwrap();
        *config = new_config;
    }

    /// Start the channel in a background thread and keep restarting the
    /// playout loop for as long as the channel stays active.
    pub async fn async_start(&self) {
        if !self.is_alive.load(Ordering::SeqCst) {
            self.run_count.fetch_add(1, Ordering::SeqCst);
            self.is_alive.store(true, Ordering::SeqCst);
            self.is_terminated.store(false, Ordering::SeqCst);
            self.list_init.store(true, Ordering::SeqCst);

            let pool_clone = self.db_pool.clone().unwrap();
            let self_clone = self.clone();
            let channel_id = self.channel.lock().unwrap().id;

            // Persist the "player running" state.
            if let Err(e) = handles::update_player(&pool_clone, channel_id, true).await {
                error!(target: Target::all(), channel = channel_id; "Unable write to player status: {e}");
            };

            thread::spawn(move || {
                let mut run_endless = true;

                while run_endless {
                    let run_count = self_clone.run_count.clone();

                    if let Err(e) = start_channel(self_clone.clone()) {
                        run_count.fetch_sub(1, Ordering::SeqCst);
                        error!("{e}");
                    };

                    let active = self_clone.channel.lock().unwrap().active;

                    if !active {
                        run_endless = false;
                    } else {
                        // Channel is still active: reset flags and restart
                        // after a short pause.
                        self_clone.run_count.fetch_add(1, Ordering::SeqCst);
                        self_clone.is_alive.store(true, Ordering::SeqCst);
                        self_clone.is_terminated.store(false, Ordering::SeqCst);
                        self_clone.list_init.store(true, Ordering::SeqCst);

                        thread::sleep(Duration::from_millis(250));
                    }
                }

                trace!("Async start done");
            });
        }
    }

    /// Start the channel for CLI/foreground mode.
    ///
    /// The last channel of the `--channels` list blocks the caller until
    /// playout ends; all previous ones run in background threads.
    pub async fn foreground_start(&self, index: usize) {
        if !self.is_alive.load(Ordering::SeqCst) {
            self.run_count.fetch_add(1, Ordering::SeqCst);
            self.is_alive.store(true, Ordering::SeqCst);
            self.is_terminated.store(false, Ordering::SeqCst);
            self.list_init.store(true, Ordering::SeqCst);

            let pool_clone = self.db_pool.clone().unwrap();
            let self_clone = self.clone();
            let channel_id = self.channel.lock().unwrap().id;

            if let Err(e) = handles::update_player(&pool_clone, channel_id, true).await {
                error!(target: Target::all(), channel = channel_id; "Unable write to player status: {e}");
            };

            if index + 1 == ARGS.channels.clone().unwrap_or_default().len() {
                let run_count = self_clone.run_count.clone();

                tokio::task::spawn_blocking(move || {
                    if let Err(e) = start_channel(self_clone) {
                        run_count.fetch_sub(1, Ordering::SeqCst);
                        error!("{e}");
                    }
                })
                .await
                .unwrap();
            } else {
                thread::spawn(move || {
                    let run_count = self_clone.run_count.clone();

                    if let Err(e) = start_channel(self_clone) {
                        run_count.fetch_sub(1, Ordering::SeqCst);
                        error!("{e}");
                    };
                });
            }
        }
    }

    /// Kill one child process unit, then wait for it to exit.
    pub fn stop(&self, unit: ProcessUnit) -> Result<(), ProcessError> {
        match unit {
            Decoder => {
                if let Some(proc) = self.decoder.lock()?.as_mut() {
                    proc.kill()
                        .map_err(|e| ProcessError::Custom(format!("Decoder: {e}")))?;
                }
            }
            Encoder => {
                if let Some(proc) = self.encoder.lock()?.as_mut() {
                    proc.kill()
                        .map_err(|e| ProcessError::Custom(format!("Encoder: {e}")))?;
                }
            }
            Ingest => {
                if let Some(proc) = self.ingest.lock()?.as_mut() {
                    proc.kill()
                        .map_err(|e| ProcessError::Custom(format!("Ingest: {e}")))?;
                }
            }
        }

        self.wait(unit)?;

        Ok(())
    }

    // Poll a child process every 10ms until it has exited.
    fn run_wait(
        &self,
        unit: ProcessUnit,
        child: &Arc<Mutex<Option<Child>>>,
    ) -> Result<(), ProcessError> {
        if let Some(proc) = child.lock().unwrap().as_mut() {
            loop {
                match proc.try_wait() {
                    Ok(Some(_)) => break,
                    Ok(None) => thread::sleep(Duration::from_millis(10)),
                    Err(e) => return Err(ProcessError::Custom(format!("{unit}: {e}"))),
                }
            }
        }

        Ok(())
    }

    /// Wait for process to proper close.
    /// This prevents orphaned/zombi processes in system
    pub fn wait(&self, unit: ProcessUnit) -> Result<(), ProcessError> {
        match unit {
            Decoder => self.run_wait(unit, &self.decoder)?,
            Encoder => self.run_wait(unit, &self.encoder)?,
            Ingest => self.run_wait(unit, &self.ingest)?,
        }

        thread::sleep(Duration::from_millis(50));

        Ok(())
    }

    /// Deactivate the channel, persist the stopped state and terminate all
    /// child processes without blocking the async executor.
    pub async fn async_stop(&self) -> Result<(), ServiceError> {
        let channel_id = self.channel.lock().unwrap().id;

        if self.is_alive.load(Ordering::SeqCst) {
            debug!(target: Target::all(), channel = channel_id; "Deactivate playout and stop all child processes from channel: <yellow>{channel_id}</>");
        }

        self.is_terminated.store(true, Ordering::SeqCst);
        self.is_alive.store(false, Ordering::SeqCst);
        self.ingest_is_running.store(false, Ordering::SeqCst);
        self.run_count.fetch_sub(1, Ordering::SeqCst);

        let pool = self.db_pool.clone().unwrap();

        if let Err(e) = handles::update_player(&pool, channel_id, false).await {
            error!(target: Target::all(), channel = channel_id; "Unable write to player status: {e}");
        };

        // stop() blocks while waiting on the children, so run it on the
        // blocking thread pool.
        for unit in [Decoder, Encoder, Ingest] {
            let self_clone = self.clone();

            if let Err(e) = web::block(move || self_clone.stop(unit)).await? {
                if !e.to_string().contains("exited process") {
                    error!(target: Target::all(), channel = channel_id; "{e}")
                }
            }
        }

        Ok(())
    }

    /// No matter what is running, terminate them all.
    pub fn stop_all(&self) {
        let channel_id = self.channel.lock().unwrap().id;

        if self.is_alive.load(Ordering::SeqCst) {
            debug!(target: Target::all(), channel = channel_id; "Stop all child processes from channel: <yellow>{channel_id}</>");
        }

        self.is_terminated.store(true, Ordering::SeqCst);
        self.is_alive.store(false, Ordering::SeqCst);
        self.ingest_is_running.store(false, Ordering::SeqCst);
        self.run_count.fetch_sub(1, Ordering::SeqCst);

        for unit in [Decoder, Encoder, Ingest] {
            if let Err(e) = self.stop(unit) {
                if !e.to_string().contains("exited process") {
                    error!(target: Target::all(), channel = channel_id; "{e}")
                }
            }
        }
    }
}
/// Registry of all channel managers known to this process.
#[derive(Clone, Debug, Default)]
pub struct ChannelController {
    pub channels: Vec<ChannelManager>,
}
impl ChannelController {
    /// Create an empty controller.
    pub fn new() -> Self {
        Self { channels: vec![] }
    }

    /// Register a channel manager.
    pub fn add(&mut self, manager: ChannelManager) {
        self.channels.push(manager);
    }

    /// Look up a manager by channel id, returning a clone of it.
    pub fn get(&self, id: i32) -> Option<ChannelManager> {
        // Cloning is cheap: ChannelManager is a bundle of Arcs.
        self.channels
            .iter()
            .find(|manager| manager.channel.lock().unwrap().id == id)
            .cloned()
    }

    /// Drop the manager for the given channel id.
    pub fn remove(&mut self, channel_id: i32) {
        self.channels.retain(|manager| {
            let channel = manager.channel.lock().unwrap();
            channel.id != channel_id
        });
    }

    /// Number of channels currently marked as alive.
    pub fn run_count(&self) -> usize {
        self.channels
            .iter()
            .filter(|manager| manager.is_alive.load(Ordering::SeqCst))
            .count()
    }
}
/// Run one playout pass for a channel.
///
/// Cleans stale HLS segments, kicks off the filler-list scan in the
/// background and then blocks inside the HLS writer or player until the
/// run ends.
pub fn start_channel(manager: ChannelManager) -> Result<(), ProcessError> {
    let config = manager.config.lock()?.clone();
    let mode = config.output.mode.clone();
    let filler_list = manager.filler_list.clone();
    let channel_id = config.general.channel_id;

    drain_hls_path(&config.channel.public)?;

    debug!(target: Target::all(), channel = channel_id; "Start ffplayout v{VERSION}, channel: <yellow>{channel_id}</>");

    // Fill filler list, can also be a single file.
    thread::spawn(move || {
        fill_filler_list(&config, Some(filler_list));
    });

    match mode {
        // write files/playlist to HLS m3u8 playlist
        HLS => write_hls(manager),
        // play on desktop or stream to a remote target
        _ => player(manager),
    }
}
pub fn drain_hls_path(path: &Path) -> io::Result<()> {
let m3u8_files = find_m3u8_files(path)?;
let mut pl_segments = vec![];
for file in m3u8_files {
let mut file = std::fs::File::open(file).unwrap();
let mut bytes: Vec<u8> = Vec::new();
file.read_to_end(&mut bytes).unwrap();
if let Ok(Playlist::MediaPlaylist(pl)) = m3u8_rs::parse_playlist_res(&bytes) {
for segment in pl.segments {
pl_segments.push(segment.uri);
}
};
}
delete_old_segments(path, &pl_segments)
}
/// Recursively searches for all files with the .m3u8 extension in the specified path.
fn find_m3u8_files(path: &Path) -> io::Result<Vec<String>> {
    let m3u8_files: Vec<String> = WalkDir::new(path)
        .into_iter()
        .filter_map(|e| e.ok())
        .filter(|e| e.path().is_file() && e.path().extension().map_or(false, |ext| ext == "m3u8"))
        .map(|e| e.path().to_string_lossy().to_string())
        .collect();

    Ok(m3u8_files)
}
/// Check if segment is in playlist, if not, delete it.
fn delete_old_segments<P: AsRef<Path> + Clone + std::fmt::Debug>(
path: P,
pl_segments: &[String],
) -> io::Result<()> {
for entry in WalkDir::new(path)
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| {
e.path().is_file()
&& e.path()
.extension()
.map_or(false, |ext| ext == "ts" || ext == "vtt")
})
{
let filename = entry.file_name().to_string_lossy().to_string();
if !pl_segments.contains(&filename) {
fs::remove_file(entry.path())?;
}
}
Ok(())
}

View File

@ -1,41 +0,0 @@
use log::*;
use regex::Regex;
use crate::utils::logging::Target;
/// Apply custom filters
pub fn filter_node(id: i32, filter: &str) -> (String, String) {
let re = Regex::new(r"^;?(\[[0-9]:[^\[]+\])?|\[[^\[]+\]$").unwrap(); // match start/end link
let mut video_filter = String::new();
let mut audio_filter = String::new();
// match chain with audio and video filter
if filter.contains("[c_v_out]") && filter.contains("[c_a_out]") {
let v_pos = filter.find("[c_v_out]").unwrap();
let a_pos = filter.find("[c_a_out]").unwrap();
let mut delimiter = "[c_v_out]";
// split delimiter should be first filter output link
if v_pos > a_pos {
delimiter = "[c_a_out]";
}
if let Some((f_1, f_2)) = filter.split_once(delimiter) {
if f_2.contains("[c_a_out]") {
video_filter = re.replace_all(f_1, "").to_string();
audio_filter = re.replace_all(f_2, "").to_string();
} else {
video_filter = re.replace_all(f_2, "").to_string();
audio_filter = re.replace_all(f_1, "").to_string();
}
}
} else if filter.contains("[c_v_out]") {
video_filter = re.replace_all(filter, "").to_string();
} else if filter.contains("[c_a_out]") {
audio_filter = re.replace_all(filter, "").to_string();
} else if !filter.is_empty() && filter != "~" {
error!(target: Target::file_mail(), channel = id; "Custom filter is not well formatted, use correct out link names (\"[c_v_out]\" and/or \"[c_a_out]\"). Filter skipped!")
}
(video_filter, audio_filter)
}

View File

@ -1,709 +0,0 @@
use std::{
fmt,
path::Path,
sync::{Arc, Mutex},
};
use log::*;
use regex::Regex;
mod custom;
pub mod v_drawtext;
use crate::player::{
controller::ProcessUnit::*,
utils::{custom_format, fps_calc, is_close, Media},
};
use crate::utils::{
config::{OutputMode::*, PlayoutConfig},
logging::Target,
};
use crate::vec_strings;
/// Which of the two filter chains (audio or video) an operation targets.
#[derive(Clone, Debug, Copy, Eq, PartialEq)]
pub enum FilterType {
    Audio,
    Video,
}
impl fmt::Display for FilterType {
    /// Renders the single-letter ffmpeg stream selector: "a" or "v".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let selector = match self {
            FilterType::Audio => "a",
            FilterType::Video => "v",
        };

        write!(f, "{selector}")
    }
}
use FilterType::*;
/// Collected ffmpeg filter state for one clip.
///
/// Audio and video filters are built up in separate chain strings and later
/// combined into a single `-filter_complex` argument by `Filters::cmd`.
#[derive(Debug, Clone)]
pub struct Filters {
    pub audio_chain: String,
    pub video_chain: String,
    // when non-empty, used verbatim instead of the assembled chains
    pub output_chain: Vec<String>,
    // output labels registered per audio/video track
    pub audio_map: Vec<String>,
    pub video_map: Vec<String>,
    // extra out-links created by the split filter
    pub audio_out_link: Vec<String>,
    pub video_out_link: Vec<String>,
    // accumulated `-map` arguments
    pub output_map: Vec<String>,
    config: PlayoutConfig,
    // ffmpeg input index the audio selectors ([{pos}:a:{track}]) refer to
    audio_position: i32,
    video_position: i32,
    // last track number a filter was added for; -1 = no filter yet
    audio_last: i32,
    video_last: i32,
}
impl Filters {
    /// Create an empty filter collection.
    ///
    /// `audio_position` is the ffmpeg input index that audio selectors are
    /// built from (e.g. `1` when audio comes from a separate input file).
    pub fn new(config: PlayoutConfig, audio_position: i32) -> Self {
        Self {
            audio_chain: String::new(),
            video_chain: String::new(),
            output_chain: vec![],
            audio_map: vec![],
            video_map: vec![],
            audio_out_link: vec![],
            video_out_link: vec![],
            output_map: vec![],
            config,
            audio_position,
            video_position: 0,
            audio_last: -1,
            video_last: -1,
        }
    }

    /// Append `filter` to the audio or video chain for track `track_nr`.
    ///
    /// Switching to a new track closes the previous chain segment with its
    /// output label, opens a new segment with an input selector (except for
    /// source-style filters like `aevalsrc`/`movie`, which take no input),
    /// and records the segment's output label in the `-map` arguments.
    /// Filters for the current track are joined with `,`, or appended
    /// verbatim when they already start with `;` or `[`.
    pub fn add_filter(&mut self, filter: &str, track_nr: i32, filter_type: FilterType) {
        // pick the map/chain/position/last-track slots for the chosen stream kind
        let (map, chain, position, last) = match filter_type {
            Audio => (
                &mut self.audio_map,
                &mut self.audio_chain,
                self.audio_position,
                &mut self.audio_last,
            ),
            Video => (
                &mut self.video_map,
                &mut self.video_chain,
                self.video_position,
                &mut self.video_last,
            ),
        };

        if *last != track_nr {
            // start new filter chain
            let mut selector = String::new();
            let mut sep = String::new();
            if !chain.is_empty() {
                // close the previous segment with its out-label before the `;`
                selector = format!("[{filter_type}out{last}]");
                sep = ";".to_string()
            }

            chain.push_str(&selector);

            if filter.starts_with("aevalsrc") || filter.starts_with("movie") {
                // source filters generate their own stream, no input selector
                chain.push_str(&format!("{sep}{filter}"));
            } else {
                chain.push_str(&format!(
                    // build audio/video selector like [0:a:0]
                    "{sep}[{position}:{filter_type}:{track_nr}]{filter}",
                ));
            }

            let m = format!("[{filter_type}out{track_nr}]");
            map.push(m.clone());
            self.output_map.append(&mut vec_strings!["-map", m]);
            *last = track_nr;
        } else if filter.starts_with(';') || filter.starts_with('[') {
            // pre-linked fragment: append as-is
            chain.push_str(filter);
        } else {
            chain.push_str(&format!(",{filter}"))
        }
    }

    /// Build the `-filter_complex` command-line arguments.
    ///
    /// A pre-set `output_chain` wins; otherwise the video and audio chains
    /// are terminated with their out-labels (when missing) and joined with
    /// `;`. Returns an empty vector when there are no filters at all.
    pub fn cmd(&mut self) -> Vec<String> {
        if !self.output_chain.is_empty() {
            return self.output_chain.clone();
        }

        let mut v_chain = self.video_chain.clone();
        let mut a_chain = self.audio_chain.clone();

        if self.video_last >= 0 && !v_chain.ends_with(']') {
            v_chain.push_str(&format!("[vout{}]", self.video_last));
        }

        if self.audio_last >= 0 && !a_chain.ends_with(']') {
            a_chain.push_str(&format!("[aout{}]", self.audio_last));
        }

        let mut f_chain = v_chain;
        let mut cmd = vec![];

        if !a_chain.is_empty() {
            if !f_chain.is_empty() {
                f_chain.push(';');
            }

            f_chain.push_str(&a_chain);
        }

        if !f_chain.is_empty() {
            cmd.push("-filter_complex".to_string());
            cmd.push(f_chain);
        }

        cmd
    }

    /// Build the `-map` command-line arguments.
    ///
    /// Streams that never got a filter (last == -1) are mapped directly from
    /// the input, so every configured track reaches the output either way.
    pub fn map(&mut self) -> Vec<String> {
        let mut o_map = self.output_map.clone();

        if self.video_last == -1 && !self.config.processing.audio_only {
            let v_map = "0:v".to_string();

            if !o_map.contains(&v_map) {
                o_map.append(&mut vec_strings!["-map", v_map]);
            };
        }

        if self.audio_last == -1 {
            for i in 0..self.config.processing.audio_tracks {
                let a_map = format!("{}:a:{i}", self.audio_position);

                if !o_map.contains(&a_map) {
                    o_map.append(&mut vec_strings!["-map", a_map]);
                };
            }
        }

        o_map
    }
}
impl Default for Filters {
    /// Empty filter set with a default config; audio taken from input 0.
    fn default() -> Self {
        Self::new(PlayoutConfig::default(), 0)
    }
}
/// Add a deinterlace filter when the probed field order is not progressive.
///
/// Uses the custom filter from the advanced config when set, otherwise
/// falls back to `yadif=0:-1:0`.
fn deinterlace(field_order: &Option<String>, chain: &mut Filters, config: &PlayoutConfig) {
    if let Some(order) = field_order {
        if order != "progressive" {
            let deinterlace = config
                .advanced
                .filter
                .deinterlace
                .clone()
                .unwrap_or_else(|| "yadif=0:-1:0".to_string());

            chain.add_filter(&deinterlace, 0, Video);
        }
    }
}
/// Add scale + pad filters when the source aspect differs from the target.
///
/// The clip is first scaled down on the overlong axis (width when it is too
/// wide, height when it is too tall), then padded with borders so the output
/// matches `config.processing.width`/`height` at the configured aspect.
fn pad(aspect: f64, chain: &mut Filters, v_stream: &ffprobe::Stream, config: &PlayoutConfig) {
    if !is_close(aspect, config.processing.aspect, 0.03) {
        let mut scale = String::new();

        if let (Some(w), Some(h)) = (v_stream.width, v_stream.height) {
            if w > config.processing.width && aspect > config.processing.aspect {
                scale = match config.advanced.filter.pad_scale_w.clone() {
                    Some(pad_scale_w) => {
                        custom_format(&format!("{pad_scale_w},"), &[&config.processing.width])
                    }
                    None => format!("scale={}:-1,", config.processing.width),
                };
            } else if h > config.processing.height && aspect < config.processing.aspect {
                scale = match config.advanced.filter.pad_scale_h.clone() {
                    Some(pad_scale_h) => {
                        // fix: the height template must be filled with the target
                        // height (was `width`, inconsistent with the
                        // `scale=-1:{height}` default below)
                        custom_format(&format!("{pad_scale_h},"), &[&config.processing.height])
                    }
                    None => format!("scale=-1:{},", config.processing.height),
                };
            }
        }

        let pad = match config.advanced.filter.pad_video.clone() {
            Some(pad_video) => custom_format(
                &format!("{scale}{pad_video}"),
                &[
                    &config.processing.width.to_string(),
                    &config.processing.height.to_string(),
                ],
            ),
            None => format!(
                "{}pad=max(iw\\,ih*({1}/{2})):ow/({1}/{2}):(ow-iw)/2:(oh-ih)/2",
                scale, config.processing.width, config.processing.height
            ),
        };

        chain.add_filter(&pad, 0, Video)
    }
}
/// Add an `fps` filter when the source frame rate differs from the target.
fn fps(fps: f64, chain: &mut Filters, config: &PlayoutConfig) {
    if fps == config.processing.fps {
        return;
    }

    let fps_filter = config.advanced.filter.fps.as_ref().map_or_else(
        || format!("fps={}", config.processing.fps),
        |f| custom_format(f, &[&config.processing.fps]),
    );

    chain.add_filter(&fps_filter, 0, Video)
}
/// Add `scale` and `setdar` filters to bring the clip to the target
/// resolution and display aspect ratio.
///
/// With known dimensions, scaling is skipped (a `null` filter keeps the
/// chain intact) when the size already matches, and `setdar` is only added
/// when the aspect actually differs. Without probed dimensions both filters
/// are added unconditionally.
fn scale(
    width: Option<i64>,
    height: Option<i64>,
    aspect: f64,
    chain: &mut Filters,
    config: &PlayoutConfig,
) {
    // identical filter construction was duplicated across both branches;
    // build each string exactly once via these closures
    let scale_filter = || match config.advanced.filter.scale.clone() {
        Some(scale) => custom_format(
            &scale,
            &[&config.processing.width, &config.processing.height],
        ),
        None => format!(
            "scale={}:{}",
            config.processing.width, config.processing.height
        ),
    };

    let dar_filter = || match config.advanced.filter.set_dar.clone() {
        Some(set_dar) => custom_format(&set_dar, &[&config.processing.aspect]),
        None => format!("setdar=dar={}", config.processing.aspect),
    };

    if let (Some(w), Some(h)) = (width, height) {
        if w != config.processing.width || h != config.processing.height {
            chain.add_filter(&scale_filter(), 0, Video);
        } else {
            // keep the chain structurally valid even when nothing is scaled
            chain.add_filter("null", 0, Video);
        }

        if !is_close(aspect, config.processing.aspect, 0.03) {
            chain.add_filter(&dar_filter(), 0, Video);
        }
    } else {
        // unknown source geometry: force both scale and dar
        chain.add_filter(&scale_filter(), 0, Video);
        chain.add_filter(&dar_filter(), 0, Video);
    }
}
/// Add fade-in/fade-out filters to a track.
///
/// A 0.5s fade-in is applied when the clip is seeked into or comes from the
/// ingest server; a 1s fade-out when the clip is cut short (`out` differs
/// from `duration`) or, for audio, when the audio track is shorter than the
/// video. Custom fade filters from the advanced config take precedence.
fn fade(
    node: &mut Media,
    chain: &mut Filters,
    nr: i32,
    filter_type: FilterType,
    config: &PlayoutConfig,
) {
    // "a" prefix selects ffmpeg's afade; empty prefix means video fade
    let mut t = "";
    let mut fade_audio = false;

    if filter_type == Audio {
        t = "a";

        if node.duration_audio > 0.0 && node.duration_audio != node.duration {
            fade_audio = true;
        }
    }

    if node.seek > 0.0 || node.unit == Ingest {
        let mut fade_in = format!("{t}fade=in:st=0:d=0.5");

        if t == "a" {
            if let Some(fade) = config.advanced.filter.afade_in.clone() {
                fade_in = custom_format(&fade, &[t]);
            }
        } else if let Some(fade) = config.advanced.filter.fade_in.clone() {
            fade_in = custom_format(&fade, &[t]);
        };

        chain.add_filter(&fade_in, nr, filter_type);
    }

    if (node.out != node.duration && node.out - node.seek > 1.0) || fade_audio {
        let mut fade_out = format!("{t}fade=out:st={}:d=1.0", (node.out - node.seek - 1.0));

        if t == "a" {
            if let Some(fade) = config.advanced.filter.afade_out.clone() {
                fade_out = custom_format(&fade, &[node.out - node.seek - 1.0]);
            }
        // dropped a redundant double `.clone().clone()` here
        } else if let Some(fade) = config.advanced.filter.fade_out.clone() {
            fade_out = custom_format(&fade, &[node.out - node.seek - 1.0]);
        };

        chain.add_filter(&fade_out, nr, filter_type);
    }
}
/// Overlay the configured logo on the video, unless the clip is an
/// advertisement.
///
/// Builds a `movie` source for the logo (looped, with configurable opacity),
/// optionally fades it in after an ad block / out before the next ad block,
/// applies the configured scale, and finally overlays it at the configured
/// position.
fn overlay(node: &mut Media, chain: &mut Filters, config: &PlayoutConfig) {
    if config.processing.add_logo
        && Path::new(&config.processing.logo_path).is_file()
        && &node.category != "advertisement"
    {
        // `null[v]` keeps the main video link; backslashes/colons in the
        // path are escaped for the filtergraph parser
        let mut logo_chain = format!(
            "null[v];movie={}:loop=0,setpts=N/(FRAME_RATE*TB),format=rgba,colorchannelmixer=aa={}",
            config
                .processing
                .logo_path
                .replace('\\', "/")
                .replace(':', "\\\\:"),
            config.processing.logo_opacity,
        );

        if node.last_ad {
            // fade the logo back in right after an advertisement
            match config.advanced.filter.overlay_logo_fade_in.clone() {
                Some(fade_in) => logo_chain.push_str(&format!(",{fade_in}")),
                None => logo_chain.push_str(",fade=in:st=0:d=1.0:alpha=1"),
            };
        }

        if node.next_ad {
            // fade the logo out during the last second before the next ad
            let length = node.out - node.seek - 1.0;

            match config.advanced.filter.overlay_logo_fade_out.clone() {
                Some(fade_out) => {
                    logo_chain.push_str(&custom_format(&format!(",{fade_out}"), &[length]))
                }
                None => logo_chain.push_str(&format!(",fade=out:st={length}:d=1.0:alpha=1")),
            }
        }

        if !config.processing.logo_scale.is_empty() {
            match &config.advanced.filter.overlay_logo_scale.clone() {
                Some(logo_scale) => logo_chain.push_str(&custom_format(
                    &format!(",{logo_scale}"),
                    &[&config.processing.logo_scale],
                )),
                None => logo_chain.push_str(&format!(",scale={}", config.processing.logo_scale)),
            }
        }

        // close the logo branch and overlay it onto the [v] link
        match config.advanced.filter.overlay_logo.clone() {
            Some(overlay) => {
                if !overlay.starts_with(',') {
                    logo_chain.push(',');
                }

                logo_chain.push_str(&custom_format(
                    &overlay,
                    &[&config.processing.logo_position],
                ))
            }
            None => logo_chain.push_str(&format!(
                "[l];[v][l]overlay={}:shortest=1",
                config.processing.logo_position
            )),
        };

        chain.add_filter(&logo_chain, 0, Video);
    }
}
/// Extend a too-short video with `tpad` so it fills the scheduled length.
///
/// Only kicks in when the probed stream duration falls more than 0.1s short
/// of the scheduled play length and the clip is not deliberately cut.
fn extend_video(node: &mut Media, chain: &mut Filters, config: &PlayoutConfig) {
    let probed_duration = node
        .probe
        .as_ref()
        .and_then(|p| p.video_streams.first())
        .and_then(|v| v.duration.as_ref())
        .and_then(|v| v.parse::<f64>().ok());

    if let Some(video_duration) = probed_duration {
        let play_len = node.out - node.seek;
        let remaining = video_duration - node.seek;

        if play_len > remaining + 0.1 && node.duration >= node.out {
            let duration = play_len - remaining;

            let tpad = match config.advanced.filter.tpad.clone() {
                Some(pad) => custom_format(&pad, &[duration]),
                None => format!("tpad=stop_mode=add:stop_duration={duration}"),
            };

            chain.add_filter(&tpad, 0, Video)
        }
    }
}
/// Add a drawtext filter for lower-third messages.
///
/// Only applies when text overlay is enabled and the unit renders text here
/// (filename-based text, HLS output, or an encoder unit).
fn add_text(
    node: &mut Media,
    chain: &mut Filters,
    config: &PlayoutConfig,
    filter_chain: &Option<Arc<Mutex<Vec<String>>>>,
) {
    if !config.text.add_text {
        return;
    }

    if config.text.text_from_filename || config.output.mode == HLS || node.unit == Encoder {
        let filter = v_drawtext::filter_node(config, Some(node), filter_chain);
        chain.add_filter(&filter, 0, Video);
    }
}
/// Generate a silent stereo track (`aevalsrc`) for a missing audio stream,
/// matching the clip's scheduled play length.
fn add_audio(node: &Media, chain: &mut Filters, nr: i32, config: &PlayoutConfig) {
    let length = node.out - node.seek;

    let silence = config.advanced.filter.aevalsrc.as_ref().map_or_else(
        || {
            format!(
                "aevalsrc=0:channel_layout=stereo:duration={}:sample_rate=48000",
                length
            )
        },
        |aevalsrc| custom_format(aevalsrc, &[length]),
    );

    chain.add_filter(&silence, nr, Audio);
}
/// Pad a too-short audio stream with silence (`apad`) up to the scheduled
/// play length.
///
/// Skipped entirely when a separate audio file is attached to the node.
fn extend_audio(node: &mut Media, chain: &mut Filters, nr: i32, config: &PlayoutConfig) {
    if Path::new(&node.audio).is_file() {
        return;
    }

    let probed_duration = node
        .probe
        .as_ref()
        .and_then(|p| p.audio_streams.first())
        .and_then(|a| a.duration.clone())
        .and_then(|a| a.parse::<f64>().ok());

    if let Some(audio_duration) = probed_duration {
        let play_len = node.out - node.seek;

        if play_len > audio_duration - node.seek + 0.1 && node.duration >= node.out {
            let apad = match config.advanced.filter.apad.clone() {
                Some(apad) => custom_format(&apad, &[play_len]),
                None => format!("apad=whole_dur={play_len}"),
            };

            chain.add_filter(&apad, nr, Audio)
        }
    }
}
/// Apply the configured volume correction, unless it is unity gain.
fn audio_volume(chain: &mut Filters, config: &PlayoutConfig, nr: i32) {
    if config.processing.volume == 1.0 {
        return;
    }

    let volume = config.advanced.filter.volume.as_ref().map_or_else(
        || format!("volume={}", config.processing.volume),
        |v| custom_format(v, &[config.processing.volume]),
    );

    chain.add_filter(&volume, nr, Audio)
}
/// Compute the source display aspect ratio from a probed `W:H` string.
///
/// Falls back to the configured target aspect when the string is missing,
/// malformed (no `:`, non-numeric parts — ffprobe can report e.g. "N/A"),
/// or has a zero denominator. The previous version `unwrap()`ed the parses
/// and indexed blindly, panicking on such input.
fn aspect_calc(aspect_string: &Option<String>, config: &PlayoutConfig) -> f64 {
    if let Some(aspect) = aspect_string {
        if let Some((num, den)) = aspect.split_once(':') {
            if let (Ok(w), Ok(h)) = (num.parse::<f64>(), den.parse::<f64>()) {
                if h != 0.0 {
                    return w / h;
                }
            }
        }
    }

    config.processing.aspect
}
/// Split a processed audio/video stream into `count` identical out-links so
/// a single chain can feed several outputs. Does nothing for `count <= 1`.
pub fn split_filter(
    chain: &mut Filters,
    count: usize,
    nr: i32,
    filter_type: FilterType,
    config: &PlayoutConfig,
) {
    if count <= 1 {
        return;
    }

    let out_link = match filter_type {
        Audio => &mut chain.audio_out_link,
        Video => &mut chain.video_out_link,
    };

    // register one out-link per output, avoiding duplicates
    for i in 0..count {
        let link = format!("[{filter_type}out_{nr}_{i}]");

        if !out_link.contains(&link) {
            out_link.push(link)
        }
    }

    let split = match config.advanced.filter.split.clone() {
        Some(split) => custom_format(&split, &[count.to_string(), out_link.join("")]),
        None => format!("split={count}{}", out_link.join("")),
    };

    chain.add_filter(&split, nr, filter_type);
}
/// Process output filter chain and add new filters to existing ones.
///
/// Splices the already-assembled video/audio chains into the user's output
/// filter string by replacing its input links (`[0:v]`, `[0:a:N]`) with the
/// corresponding chain text, then stores the result as the final
/// `-filter_complex` in `chain.output_chain`. Splicing only happens for HLS
/// output or when dynamic (non-filename) text overlay is active; otherwise
/// the custom filter is used verbatim.
fn process_output_filters(config: &PlayoutConfig, chain: &mut Filters, custom_filter: &str) {
    let filter =
        if (config.text.add_text && !config.text.text_from_filename) || config.output.mode == HLS {
            let re_v = Regex::new(r"\[[0:]+[v^\[]+([:0]+)?\]").unwrap(); // match video filter input link
            let _re_a = Regex::new(r"\[[0:]+[a^\[]+([:0]+)?\]").unwrap(); // match audio filter input link
            let mut cf = custom_filter.to_string();

            if !chain.video_chain.is_empty() {
                // prepend the assembled video chain where the custom filter
                // expects its video input
                cf = re_v
                    .replace(&cf, &format!("{},", chain.video_chain))
                    .to_string()
            }

            if !chain.audio_chain.is_empty() {
                // one chain segment per track; strip each segment's out-label
                // before splicing it in front of the matching [0:a:N] input
                let audio_split = chain
                    .audio_chain
                    .split(';')
                    .enumerate()
                    .map(|(i, p)| p.replace(&format!("[aout{i}]"), ""))
                    .collect::<Vec<String>>();

                for i in 0..config.processing.audio_tracks {
                    cf = cf.replace(
                        &format!("[0:a:{i}]"),
                        &format!("{},", &audio_split[i as usize]),
                    )
                }
            }

            cf
        } else {
            custom_filter.to_string()
        };

    chain.output_chain = vec_strings!["-filter_complex", filter]
}
/// Append a user-defined filter to the chain; empty strings are skipped.
fn custom(filter: &str, chain: &mut Filters, nr: i32, filter_type: FilterType) {
    if filter.is_empty() {
        return;
    }

    chain.add_filter(filter, nr, filter_type);
}
/// Build the complete filter collection for one clip.
///
/// Order of operations: encoder units only get text/output filters; decoder
/// and ingest units get the full video pipeline (deinterlace, pad, fps,
/// scale, extend, text, fade, logo overlay), followed by custom filters and
/// the per-track audio pipeline (extend/silence, fade, volume, custom).
/// HLS output finally gets its output filter spliced in.
pub fn filter_chains(
    config: &PlayoutConfig,
    node: &mut Media,
    filter_chain: &Option<Arc<Mutex<Vec<String>>>>,
) -> Filters {
    let mut filters = Filters::new(config.clone(), 0);

    // a generated color source carries no audio; audio comes from input 1
    if node.source.contains("color=c=") {
        filters.audio_position = 1;
    }

    // encoder units only overlay text and apply output filters
    if node.unit == Encoder {
        if !config.processing.audio_only {
            add_text(node, &mut filters, config, filter_chain);
        }

        if let Some(f) = config.output.output_filter.clone() {
            process_output_filters(config, &mut filters, &f)
        } else if config.output.output_count > 1 && !config.processing.audio_only {
            split_filter(&mut filters, config.output.output_count, 0, Video, config);
        }

        return filters;
    }

    if !config.processing.audio_only && !config.processing.copy_video {
        if let Some(probe) = node.probe.as_ref() {
            // separate audio file: audio selectors point at input 1
            if Path::new(&node.audio).is_file() {
                filters.audio_position = 1;
            }

            if let Some(v_stream) = &probe.video_streams.first() {
                let aspect = aspect_calc(&v_stream.display_aspect_ratio, config);
                let frame_per_sec = fps_calc(&v_stream.r_frame_rate, 1.0);

                deinterlace(&v_stream.field_order, &mut filters, config);
                pad(aspect, &mut filters, v_stream, config);
                fps(frame_per_sec, &mut filters, config);
                scale(
                    v_stream.width,
                    v_stream.height,
                    aspect,
                    &mut filters,
                    config,
                );
            }

            extend_video(node, &mut filters, config);
        } else {
            // no probe data: force fps and scale with neutral source values
            fps(0.0, &mut filters, config);
            scale(None, None, 1.0, &mut filters, config);
        }

        add_text(node, &mut filters, config, filter_chain);
        fade(node, &mut filters, 0, Video, config);
        overlay(node, &mut filters, config);
    }

    // ingest units use the ingest custom filter, everything else the
    // processing one; per-clip filters from the playlist come on top
    let (proc_vf, proc_af) = if node.unit == Ingest {
        custom::filter_node(config.general.channel_id, &config.ingest.custom_filter)
    } else {
        custom::filter_node(config.general.channel_id, &config.processing.custom_filter)
    };

    let (list_vf, list_af) = custom::filter_node(config.general.channel_id, &node.custom_filter);

    if !config.processing.copy_video {
        custom(&proc_vf, &mut filters, 0, Video);
        custom(&list_vf, &mut filters, 0, Video);
    }

    // -1 selects all configured tracks, otherwise just the chosen one
    let mut audio_indexes = vec![];

    if config.processing.audio_track_index == -1 {
        for i in 0..config.processing.audio_tracks {
            audio_indexes.push(i)
        }
    } else {
        audio_indexes.push(config.processing.audio_track_index)
    }

    if !config.processing.copy_audio {
        for i in audio_indexes {
            if node
                .probe
                .as_ref()
                .and_then(|p| p.audio_streams.get(i as usize))
                .is_some()
                || Path::new(&node.audio).is_file()
            {
                extend_audio(node, &mut filters, i, config);
            } else if node.unit == Decoder && !node.source.contains("color=c=") {
                warn!(target: Target::file_mail(), channel = config.general.channel_id;
                    "Missing audio track (id {i}) from <b><magenta>{}</></b>",
                    node.source
                );

                add_audio(node, &mut filters, i, config);
            }

            // add at least anull filter, for correct filter construction,
            // is important for split filter in HLS mode
            filters.add_filter("anull", i, Audio);

            fade(node, &mut filters, i, Audio, config);
            audio_volume(&mut filters, config, i);

            custom(&proc_af, &mut filters, i, Audio);
            custom(&list_af, &mut filters, i, Audio);
        }
    } else if config.processing.audio_track_index > -1 {
        error!(target: Target::file_mail(), channel = config.general.channel_id; "Setting 'audio_track_index' other than '-1' is not allowed in audio copy mode!")
    }

    if config.output.mode == HLS {
        if let Some(f) = config.output.output_filter.clone() {
            process_output_filters(config, &mut filters, &f)
        }
    }

    filters
}

View File

@ -1,74 +0,0 @@
use std::{
ffi::OsStr,
path::Path,
sync::{Arc, Mutex},
};
use regex::Regex;
use crate::player::{
controller::ProcessUnit::*,
utils::{custom_format, Media},
};
use crate::utils::config::PlayoutConfig;
/// Build the ffmpeg `drawtext` filter string for lower-third text.
///
/// With `text_from_filename` enabled (and a node present) the text is taken
/// from the clip's file name via `config.text.regex` (falling back to the
/// file stem) and rendered statically. Otherwise a zmq-controlled
/// `drawtext@dyntext` filter is produced so text can be updated at runtime;
/// ingest units use the server socket, everything else the stream socket.
pub fn filter_node(
    config: &PlayoutConfig,
    node: Option<&Media>,
    filter_chain: &Option<Arc<Mutex<Vec<String>>>>,
) -> String {
    let mut filter = String::new();
    let mut font = String::new();

    if Path::new(&config.text.font_path).is_file() {
        font = format!(":fontfile='{}'", config.text.font_path)
    }

    let zmq_socket = match node.map(|n| n.unit) {
        Some(Ingest) => config.text.zmq_server_socket.clone(),
        _ => config.text.zmq_stream_socket.clone(),
    };

    // take the filename path only when a node is actually present; the old
    // `unwrap_or(&Media::new(..))` fallback after `is_some()` was dead code
    // and allocated a throwaway Media
    let filename_node = if config.text.text_from_filename {
        node
    } else {
        None
    };

    if let Some(n) = filename_node {
        let source = n.source.clone();
        let text = match Regex::new(&config.text.regex)
            .ok()
            .and_then(|r| r.captures(&source))
        {
            Some(t) => t[1].to_string(),
            None => Path::new(&source)
                .file_stem()
                .unwrap_or_else(|| OsStr::new(&source))
                .to_string_lossy()
                .to_string(),
        };

        // escape characters that are special to drawtext/filtergraph syntax
        let escaped_text = text
            .replace('\'', "'\\\\\\''")
            .replace('%', "\\\\\\%")
            .replace(':', "\\:");

        filter = match &config.advanced.filter.drawtext_from_file {
            Some(drawtext) => custom_format(drawtext, &[&escaped_text, &config.text.style, &font]),
            None => format!("drawtext=text='{escaped_text}':{}{font}", config.text.style),
        };
    } else if let Some(socket) = zmq_socket {
        let mut filter_cmd = format!("text=''{font}");

        // reuse a previously registered drawtext link when one exists
        if let Some(chain) = filter_chain {
            if let Some(link) = chain.lock().unwrap().iter().find(|&l| l.contains("text")) {
                filter_cmd = link.to_string();
            }
        }

        filter = match config.advanced.filter.drawtext_from_zmq.clone() {
            Some(drawtext) => custom_format(&drawtext, &[&socket.replace(':', "\\:"), &filter_cmd]),
            None => format!(
                "zmq=b=tcp\\\\://'{}',drawtext@dyntext={filter_cmd}",
                socket.replace(':', "\\:")
            ),
        };
    }

    filter
}

View File

@ -1,100 +0,0 @@
use std::{
path::Path,
sync::{
atomic::{AtomicBool, Ordering},
mpsc::channel,
{Arc, Mutex},
},
thread::sleep,
time::Duration,
};
use log::*;
use notify::{
event::{CreateKind, ModifyKind, RemoveKind, RenameMode},
EventKind::{Create, Modify, Remove},
RecursiveMode,
};
use notify_debouncer_full::new_debouncer;
use crate::player::utils::{include_file_extension, Media};
use crate::utils::{config::PlayoutConfig, logging::Target};
/// Create a watcher, which monitors file changes.
/// When a change is registered, update the current file list.
/// This makes it possible to play infinitely and always add new files to it.
pub fn watchman(
    config: PlayoutConfig,
    is_terminated: Arc<AtomicBool>,
    sources: Arc<Mutex<Vec<Media>>>,
) {
    let id = config.general.channel_id;
    let path = Path::new(&config.channel.storage);

    if !path.exists() {
        error!(target: Target::file_mail(), channel = id; "Folder path not exists: '{path:?}'");
        panic!("Folder path not exists: '{path:?}'");
    }

    // debounce raw fs events for 1s so bulk copies don't flood the list
    let (tx, rx) = channel();
    let mut debouncer = new_debouncer(Duration::from_secs(1), None, tx).unwrap();

    debouncer.watch(path, RecursiveMode::Recursive).unwrap();

    while !is_terminated.load(Ordering::SeqCst) {
        if let Ok(result) = rx.try_recv() {
            match result {
                Ok(events) => events.iter().for_each(|event| match event.kind {
                    // new file (or moved in from outside): append to the list
                    Create(CreateKind::File) | Modify(ModifyKind::Name(RenameMode::To)) => {
                        let new_path = &event.paths[0];

                        if new_path.is_file() && include_file_extension(&config, new_path) {
                            let index = sources.lock().unwrap().len();
                            let media = Media::new(index, &new_path.to_string_lossy(), false);

                            sources.lock().unwrap().push(media);
                            info!(target: Target::file_mail(), channel = id; "Create new file: <b><magenta>{new_path:?}</></b>");
                        }
                    }
                    // file deleted (or moved away): drop it from the list
                    Remove(RemoveKind::File) | Modify(ModifyKind::Name(RenameMode::From)) => {
                        let old_path = &event.paths[0];

                        if !old_path.is_file() && include_file_extension(&config, old_path) {
                            sources
                                .lock()
                                .unwrap()
                                .retain(|x| x.source != old_path.to_string_lossy());
                            info!(target: Target::file_mail(), channel = id; "Remove file: <b><magenta>{old_path:?}</></b>");
                        }
                    }
                    // rename inside the watched tree: replace in place when the
                    // old entry is known, otherwise treat as a new file
                    Modify(ModifyKind::Name(RenameMode::Both)) => {
                        let old_path = &event.paths[0];
                        let new_path = &event.paths[1];

                        let mut media_list = sources.lock().unwrap();

                        if let Some(index) = media_list
                            .iter()
                            .position(|x| *x.source == old_path.display().to_string()) {
                            let media = Media::new(index, &new_path.to_string_lossy(), false);
                            media_list[index] = media;

                            info!(target: Target::file_mail(), channel = id; "Move file: <b><magenta>{old_path:?}</></b> to <b><magenta>{new_path:?}</></b>");
                        } else if include_file_extension(&config, new_path) {
                            let index = media_list.len();
                            let media = Media::new(index, &new_path.to_string_lossy(), false);

                            media_list.push(media);

                            info!(target: Target::file_mail(), channel = id; "Create new file: <b><magenta>{new_path:?}</></b>");
                        }
                    }
                    _ => trace!(target: Target::file_mail(), channel = id; "Not tracked file event: {event:?}")
                }),
                Err(errors) => errors.iter().for_each(|error| error!(target: Target::file_mail(), channel = id; "{error:?}")),
            }
        }

        sleep(Duration::from_secs(3));
    }
}

View File

@ -1,184 +0,0 @@
use std::{
io::{BufRead, BufReader, Read},
process::{ChildStderr, Command, Stdio},
sync::{atomic::Ordering, mpsc::SyncSender},
thread,
};
use log::*;
use crate::utils::{
config::{PlayoutConfig, FFMPEG_IGNORE_ERRORS, FFMPEG_UNRECOVERABLE_ERRORS},
logging::{log_line, Target},
};
use crate::vec_strings;
use crate::{
player::{
controller::{ChannelManager, ProcessUnit::*},
utils::{is_free_tcp_port, valid_stream, Media},
},
utils::errors::ProcessError,
};
/// Watch the ingest ffmpeg's stderr line by line.
///
/// Logs every line that is not on an ignore list, stops the ingest process
/// on an unexpected (invalid) RTMP stream, and shuts the whole channel down
/// when an unrecoverable ffmpeg error appears.
fn server_monitor(
    id: i32,
    level: &str,
    ignore: Vec<String>,
    buffer: BufReader<ChildStderr>,
    channel_mgr: ChannelManager,
) -> Result<(), ProcessError> {
    for raw_line in buffer.lines() {
        let line = raw_line?;

        let ignorable = FFMPEG_IGNORE_ERRORS.iter().any(|i| line.contains(*i))
            || ignore.iter().any(|i| line.contains(i));

        if !ignorable {
            log_line(&line, level);
        }

        let unexpected_ingest =
            line.contains("rtmp") && line.contains("Unexpected stream") && !valid_stream(&line);

        if unexpected_ingest {
            warn!(target: Target::file_mail(), channel = id; "Unexpected ingest stream: {line}");

            if let Err(e) = channel_mgr.stop(Ingest) {
                error!(target: Target::file_mail(), channel = id; "{e}");
            };
        }

        if FFMPEG_UNRECOVERABLE_ERRORS
            .iter()
            .any(|i| line.contains(*i))
        {
            error!(target: Target::file_mail(), channel = id; "Hit unrecoverable error!");
            channel_mgr.channel.lock().unwrap().active = false;
            channel_mgr.stop_all();
        }
    }

    Ok(())
}
/// ffmpeg Ingest Server
///
/// Start ffmpeg in listen mode, and wait for input.
///
/// The server process is (re)spawned in a loop until the channel is
/// terminated; its stdout is pumped in 65088-byte chunks through
/// `ingest_sender` to the encoder, while a separate thread monitors stderr
/// (see `server_monitor`). `ingest_is_running` is flipped on first data and
/// off again when the connection drops.
pub fn ingest_server(
    config: PlayoutConfig,
    ingest_sender: SyncSender<(usize, [u8; 65088])>,
    channel_mgr: ChannelManager,
) -> Result<(), ProcessError> {
    let id = config.general.channel_id;
    let mut buffer: [u8; 65088] = [0; 65088];
    let mut server_cmd = vec_strings!["-hide_banner", "-nostats", "-v", "level+info"];
    let stream_input = config.ingest.input_cmd.clone().unwrap();
    // dummy media drives the filter construction for the live stream
    let mut dummy_media = Media::new(0, "Live Stream", false);
    dummy_media.unit = Ingest;
    dummy_media.add_filter(&config, &None);
    let is_terminated = channel_mgr.is_terminated.clone();
    let ingest_is_running = channel_mgr.ingest_is_running.clone();

    let vtt_dummy = config
        .channel
        .storage
        .join(config.processing.vtt_dummy.clone().unwrap_or_default());

    // assemble the full ffmpeg argument list: advanced input options,
    // listen input, optional vtt input, filters/maps, output options
    if let Some(ingest_input_cmd) = config.advanced.ingest.input_cmd {
        server_cmd.append(&mut ingest_input_cmd.clone());
    }

    server_cmd.append(&mut stream_input.clone());

    if config.processing.vtt_enable && vtt_dummy.is_file() {
        server_cmd.append(&mut vec_strings!["-i", vtt_dummy.to_string_lossy()]);
    }

    if let Some(mut filter) = dummy_media.filter {
        server_cmd.append(&mut filter.cmd());
        server_cmd.append(&mut filter.map());
    }

    if config.processing.vtt_enable && vtt_dummy.is_file() {
        server_cmd.append(&mut vec_strings!("-map", "1:s"));
    }

    if let Some(mut cmd) = config.processing.cmd {
        server_cmd.append(&mut cmd);
    }

    let mut is_running;

    debug!(target: Target::file_mail(), channel = id;
        "Server CMD: <bright-blue>\"ffmpeg {}\"</>",
        server_cmd.join(" ")
    );

    // refuse to start when the listen port is already taken
    if let Some(url) = stream_input.iter().find(|s| s.contains("://")) {
        if !is_free_tcp_port(id, url) {
            channel_mgr.channel.lock().unwrap().active = false;
            channel_mgr.stop_all();
        } else {
            info!(target: Target::file_mail(), channel = id; "Start ingest server, listening on: <b><magenta>{url}</></b>");
        }
    };

    while !is_terminated.load(Ordering::SeqCst) {
        let proc_ctl = channel_mgr.clone();
        let level = config.logging.ingest_level.clone();
        let ignore = config.logging.ignore_lines.clone();
        let mut server_proc = match Command::new("ffmpeg")
            .args(server_cmd.clone())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .spawn()
        {
            Err(e) => {
                error!(target: Target::file_mail(), channel = id; "couldn't spawn ingest server: {e}");
                panic!("couldn't spawn ingest server: {e}")
            }
            Ok(proc) => proc,
        };
        let mut ingest_reader = BufReader::new(server_proc.stdout.take().unwrap());
        let server_err = BufReader::new(server_proc.stderr.take().unwrap());
        // stderr is watched on its own thread so reads here never block on logs
        let error_reader_thread =
            thread::spawn(move || server_monitor(id, &level, ignore, server_err, proc_ctl));

        *channel_mgr.ingest.lock().unwrap() = Some(server_proc);
        is_running = false;

        loop {
            let bytes_len = match ingest_reader.read(&mut buffer[..]) {
                Ok(length) => length,
                Err(e) => {
                    debug!(target: Target::file_mail(), channel = id; "Ingest server read {e:?}");
                    break;
                }
            };

            // first successful read marks the stream as live
            if !is_running {
                ingest_is_running.store(true, Ordering::SeqCst);
                is_running = true;
            }

            if bytes_len > 0 {
                if let Err(e) = ingest_sender.send((bytes_len, buffer)) {
                    error!(target: Target::file_mail(), channel = id; "Ingest server write error: {e:?}");

                    is_terminated.store(true, Ordering::SeqCst);
                    break;
                }
            } else {
                // zero-length read: the sender disconnected
                break;
            }
        }

        drop(ingest_reader);
        ingest_is_running.store(false, Ordering::SeqCst);

        if let Err(e) = channel_mgr.wait(Ingest) {
            error!(target: Target::file_mail(), channel = id; "{e}")
        }

        if let Err(e) = error_reader_thread.join() {
            error!(target: Target::file_mail(), channel = id; "{e:?}");
        };
    }

    Ok(())
}

View File

@ -1,50 +0,0 @@
use std::thread;
use log::*;
pub mod folder;
pub mod ingest;
pub mod playlist;
pub use folder::watchman;
pub use ingest::ingest_server;
pub use playlist::CurrentProgram;
use crate::player::{
controller::ChannelManager,
utils::{folder::FolderSource, Media},
};
use crate::utils::{config::ProcessMode::*, logging::Target};
/// Create a source iterator from playlist, or from folder.
///
/// In folder mode a watcher thread keeps the shared file list up to date
/// while `FolderSource` iterates over it; in playlist mode `CurrentProgram`
/// pulls clips from the JSON playlist.
pub fn source_generator(manager: ChannelManager) -> Box<dyn Iterator<Item = Media>> {
    let config = manager.config.lock().unwrap().clone();
    let id = config.general.channel_id;
    let is_terminated = manager.is_terminated.clone();
    let current_list = manager.current_list.clone();

    match config.processing.mode {
        Folder => {
            info!(target: Target::file_mail(), channel = id; "Playout in folder mode");
            debug!(target: Target::file_mail(), channel = id;
                "Monitor folder: <b><magenta>{:?}</></b>",
                config.channel.storage
            );

            let config_clone = config.clone();
            let folder_source = FolderSource::new(&config, manager);
            let list_clone = current_list.clone();

            // Spawn a thread to monitor folder for file changes.
            thread::spawn(move || watchman(config_clone, is_terminated.clone(), list_clone));

            Box::new(folder_source) as Box<dyn Iterator<Item = Media>>
        }
        Playlist => {
            info!(target: Target::file_mail(), channel = id; "Playout in playlist mode");
            let program = CurrentProgram::new(manager);

            Box::new(program) as Box<dyn Iterator<Item = Media>>
        }
    }
}

View File

@ -1,844 +0,0 @@
use std::{
path::Path,
sync::{
atomic::{AtomicBool, Ordering},
Arc, Mutex,
},
};
use log::*;
use crate::db::handles;
use crate::player::{
controller::ChannelManager,
utils::{
gen_dummy, get_delta, is_close, is_remote,
json_serializer::{read_json, set_defaults},
loop_filler, loop_image, modified_time, seek_and_length, time_in_seconds, JsonPlaylist,
Media, MediaProbe,
},
};
use crate::utils::{
config::{PlayoutConfig, IMAGE_FORMAT},
logging::Target,
};
/// Struct for current playlist.
///
/// Here we prepare the init clip and build an iterator where we pull our clips.
#[derive(Debug)]
pub struct CurrentProgram {
    // channel id, used as log target
    id: i32,
    config: PlayoutConfig,
    manager: ChannelManager,
    // daily playlist start, in seconds of day
    start_sec: f64,
    // target playlist length in seconds
    end_sec: f64,
    json_playlist: JsonPlaylist,
    current_node: Media,
    is_terminated: Arc<AtomicBool>,
    last_json_path: Option<String>,
    // whether the previous clip was an advertisement
    last_node_ad: bool,
}
/// Prepare a playlist iterator.
impl CurrentProgram {
pub fn new(manager: ChannelManager) -> Self {
let config = manager.config.lock().unwrap().clone();
let is_terminated = manager.is_terminated.clone();
Self {
id: config.general.channel_id,
config: config.clone(),
manager,
start_sec: config.playlist.start_sec.unwrap(),
end_sec: config.playlist.length_sec.unwrap(),
json_playlist: JsonPlaylist::new(
"1970-01-01".to_string(),
config.playlist.start_sec.unwrap(),
),
current_node: Media::new(0, "", false),
is_terminated,
last_json_path: None,
last_node_ad: false,
}
}
    // Check if there is no current playlist, or if the file got updated,
    // and if so load/reload it.
    fn load_or_update_playlist(&mut self, seek: bool) {
        let mut get_current = false;
        let mut reload = false;

        if let Some(path) = self.json_playlist.path.clone() {
            // reload when the file's mtime no longer matches what we read
            if (Path::new(&path).is_file() || is_remote(&path))
                && self.json_playlist.modified != modified_time(&path)
            {
                info!(target: Target::file_mail(), channel = self.id; "Reload playlist <b><magenta>{path}</></b>");
                self.manager.list_init.store(true, Ordering::SeqCst);
                get_current = true;
                reload = true;
            }
        } else {
            // no playlist loaded yet
            get_current = true;
        }

        if get_current {
            self.json_playlist = read_json(
                &mut self.config,
                self.manager.current_list.clone(),
                self.json_playlist.path.clone(),
                self.is_terminated.clone(),
                seek,
                false,
            );

            if !reload {
                if let Some(file) = &self.json_playlist.path {
                    info!(target: Target::file_mail(), channel = self.id; "Read playlist: <b><magenta>{file}</></b>");
                }

                // a date change resets the playout status (time shift etc.)
                if *self
                    .manager
                    .channel
                    .lock()
                    .unwrap()
                    .last_date
                    .clone()
                    .unwrap_or_default()
                    != self.json_playlist.date
                {
                    self.set_status(self.json_playlist.date.clone());
                }

                self.manager
                    .current_date
                    .lock()
                    .unwrap()
                    .clone_from(&self.json_playlist.date);
            }

            self.manager
                .current_list
                .lock()
                .unwrap()
                .clone_from(&self.json_playlist.program);

            if self.json_playlist.path.is_none() {
                // no playlist file found: fall back to an empty node and
                // restart from index 0 in init state
                trace!("missing playlist");

                self.current_node = Media::new(0, "", false);
                self.manager.list_init.store(true, Ordering::SeqCst);
                self.manager.current_index.store(0, Ordering::SeqCst);
            }
        }
    }
    // Check if the day is past and it is time for a new playlist.
    //
    // Returns `true` when the next day's playlist was loaded, otherwise the
    // current playlist is (re)loaded in place.
    fn check_for_playlist(&mut self, seek: bool) -> bool {
        let (delta, total_delta) = get_delta(&self.config, &time_in_seconds());
        let mut next = false;

        let duration = if self.current_node.duration >= self.current_node.out {
            self.current_node.duration
        } else {
            // maybe out is longer to be able to loop
            self.current_node.out
        };

        let node_index = self.current_node.index.unwrap_or_default();

        let mut next_start =
            self.current_node.begin.unwrap_or_default() - self.start_sec + duration + delta;

        // at the last clip, allow some slack before switching days
        if node_index > 0 && node_index == self.manager.current_list.lock().unwrap().len() - 1 {
            next_start += self.config.general.stop_threshold;
        }

        trace!(
            "delta: {delta} | total_delta: {total_delta}, index: {node_index} \n        next_start: {next_start} | end_sec: {} | source {}",
            self.end_sec,
            self.current_node.source
        );

        // Check if we over the target length or we are close to it, if so we load the next playlist.
        if !self.config.playlist.infinit
            && (next_start >= self.end_sec
                || is_close(total_delta, 0.0, 2.0)
                || is_close(total_delta, self.end_sec, 2.0))
        {
            trace!("get next day");
            next = true;

            self.json_playlist = read_json(
                &mut self.config,
                self.manager.current_list.clone(),
                None,
                self.is_terminated.clone(),
                false,
                true,
            );

            if let Some(file) = &self.json_playlist.path {
                info!(target: Target::file_mail(), channel = self.id; "Read next playlist: <b><magenta>{file}</></b>");
            }

            self.manager.list_init.store(false, Ordering::SeqCst);
            self.set_status(self.json_playlist.date.clone());

            self.manager
                .current_list
                .lock()
                .unwrap()
                .clone_from(&self.json_playlist.program);
            self.manager.current_index.store(0, Ordering::SeqCst);
        } else {
            self.load_or_update_playlist(seek)
        }

        next
    }
/// Record the new playout date: remember it as the channel's last date,
/// reset the time shift, and persist the stat row to the database.
///
/// Fix: the original locked the channel mutex four separate times, so the
/// shift check and the subsequent reset could interleave with other writers;
/// all channel updates now happen under a single guard.
fn set_status(&mut self, date: String) {
    {
        let mut channel = self.manager.channel.lock().unwrap();

        if channel.last_date != Some(date.clone()) && channel.time_shift != 0.0 {
            info!(target: Target::file_mail(), channel = self.id; "Reset playout status");
        }

        channel.last_date = Some(date.clone());
        channel.time_shift = 0.0;
    }

    self.manager.current_date.lock().unwrap().clone_from(&date);

    let db_pool = self.manager.db_pool.clone().unwrap();

    // NOTE(review): a fresh tokio runtime per call is costly — presumably
    // acceptable here because status changes happen once per day; confirm.
    if let Err(e) = tokio::runtime::Runtime::new()
        .unwrap()
        .block_on(handles::update_stat(
            &db_pool,
            self.config.general.channel_id,
            date,
            0.0,
        ))
    {
        error!(target: Target::file_mail(), channel = self.id; "Unable to write status: {e}");
    };
}
/// Flag `node` when its neighbouring playlist entries are advertisements.
///
/// Checks the clip before and after the current index and raises
/// `last_ad` / `next_ad` accordingly (flags are only ever set, never cleared).
fn last_next_ad(&mut self, node: &mut Media) {
    let index = self.manager.current_index.load(Ordering::SeqCst);
    let list = self.manager.current_list.lock().unwrap();

    let is_ad = |m: &Media| m.category == "advertisement";

    if list.get(index + 1).map_or(false, is_ad) {
        node.next_ad = true;
    }

    if index > 0 && index < list.len() && is_ad(&list[index - 1]) {
        node.last_ad = true;
    }
}
/// Current wall-clock time in seconds since midnight; when we are before the
/// configured playlist start, a full day is added so comparisons against the
/// playlist timeline stay monotonic.
fn get_current_time(&mut self) -> f64 {
    let now = time_in_seconds();

    if now < self.start_sec {
        now + 86400.0
    } else {
        now
    }
}
/// On init or reload: locate the clip that should be playing right now and
/// store its index, honoring a possible time shift from a previous run.
fn get_current_clip(&mut self) {
    let mut time_sec = self.get_current_time();
    let shift = self.manager.channel.lock().unwrap().time_shift;

    if shift != 0.0 {
        info!(target: Target::file_mail(), channel = self.id; "Shift playlist start for <yellow>{shift:.3}</> seconds");
        time_sec += shift;
    }

    // For infinite playlists shorter than a day, re-anchor the clip begins
    // once the current loop has been exceeded.
    if self.config.playlist.infinit
        && self.json_playlist.length.unwrap() < 86400.0
        && time_sec > self.json_playlist.length.unwrap() + self.start_sec
    {
        self.recalculate_begin(true)
    }

    // Pick the first clip whose end lies after the current time.
    for (i, item) in self.manager.current_list.lock().unwrap().iter().enumerate() {
        if item.begin.unwrap() + item.out - item.seek > time_sec {
            self.manager.list_init.store(false, Ordering::SeqCst);
            self.manager.current_index.store(i, Ordering::SeqCst);

            break;
        }
    }
}
/// Prepare the very first clip after an init/reload.
///
/// Seeks into the clip matching the current time and returns `true` when the
/// resulting source turned out to be a filler/placeholder (a storage-path
/// clip or the generated dark color source).
fn init_clip(&mut self) -> bool {
    trace!("init_clip");
    self.get_current_clip();

    let mut is_filler = false;

    if !self.manager.list_init.load(Ordering::SeqCst) {
        let time_sec = self.get_current_time();
        let index = self.manager.current_index.load(Ordering::SeqCst);
        let nodes = self.manager.current_list.lock().unwrap();
        let last_index = nodes.len() - 1;

        // de-instance node to preserve original values in list
        let mut node_clone = nodes[index].clone();

        // Important! When no manual drop is happen here, lock is still active in handle_list_init
        drop(nodes);

        trace!("Clip from init: {}", node_clone.source);

        // Seek position = elapsed time since the clip's (shifted) begin.
        node_clone.seek += time_sec
            - (node_clone.begin.unwrap() - self.manager.channel.lock().unwrap().time_shift);

        self.last_next_ad(&mut node_clone);
        self.manager.current_index.fetch_add(1, Ordering::SeqCst);

        self.current_node =
            handle_list_init(&self.config, node_clone, &self.manager, last_index);

        if self
            .current_node
            .source
            .contains(&self.config.channel.storage.to_string_lossy().to_string())
            || self.current_node.source.contains("color=c=#121212")
        {
            is_filler = true;
        }
    }

    is_filler
}
/// Close the gap at the end of a playlist by appending a placeholder clip
/// that covers the remaining `total_delta` seconds.
fn fill_end(&mut self, total_delta: f64) {
    let index = self.manager.current_index.load(Ordering::SeqCst);

    let mut placeholder = Media::new(index, "", false);
    placeholder.begin = Some(time_in_seconds());
    placeholder.duration = total_delta;
    placeholder.out = total_delta;

    self.last_next_ad(&mut placeholder);

    self.current_node = gen_source(&self.config, placeholder, &self.manager, 0);

    // Keep the generated clip in the shared list so index bookkeeping
    // stays consistent for later iterations.
    self.manager
        .current_list
        .lock()
        .unwrap()
        .push(self.current_node.clone());

    self.current_node.last_ad = self.last_node_ad;
    self.current_node
        .add_filter(&self.config, &self.manager.filter_chain);

    self.manager.current_index.fetch_add(1, Ordering::SeqCst);
}
/// Re-anchor all clip begins of an infinite playlist at "now".
///
/// With `extend`, the start is moved back by the time already spent inside
/// the current loop, so the playlist position survives the re-anchoring.
fn recalculate_begin(&mut self, extend: bool) {
    debug!(target: Target::file_mail(), channel = self.id; "Infinit playlist reaches end, recalculate clip begins. Extend: <yellow>{extend}</>");

    let mut time_sec = time_in_seconds();

    if extend {
        // Seconds elapsed since playlist start, wrapping over midnight.
        let elapsed_sec = if time_sec >= self.start_sec {
            time_sec - self.start_sec
        } else {
            time_sec + 86400.0 - self.start_sec
        };

        // Rewind to the begin of the current loop iteration.
        time_sec -= elapsed_sec % self.json_playlist.length.unwrap();
    }

    self.json_playlist.start_sec = Some(time_sec);
    set_defaults(&mut self.json_playlist);

    self.manager
        .current_list
        .lock()
        .unwrap()
        .clone_from(&self.json_playlist.program);
}
}
/// Build the playlist iterator
impl Iterator for CurrentProgram {
    type Item = Media;

    /// Produce the next media node to play.
    ///
    /// Handles, in order: playlist (re)loading, the init case after a
    /// start/reload, normal advancing through the list, and the wrap-around
    /// to the next day (or the next loop for infinite playlists).
    fn next(&mut self) -> Option<Self::Item> {
        self.last_json_path.clone_from(&self.json_playlist.path);
        self.last_node_ad = self.current_node.last_ad;
        self.check_for_playlist(self.manager.list_init.load(Ordering::SeqCst));

        if self.manager.list_init.load(Ordering::SeqCst) {
            trace!("Init playlist, from next iterator");
            let mut init_clip_is_filler = false;

            if self.json_playlist.path.is_some() {
                init_clip_is_filler = self.init_clip();
            }

            if self.manager.list_init.load(Ordering::SeqCst) && !init_clip_is_filler {
                // On init load, playlist could be not long enough, or clips are not found
                // so we fill the gap with a dummy.
                trace!("Init clip is no filler");

                let mut current_time = time_in_seconds();
                let (_, total_delta) = get_delta(&self.config, &current_time);

                if self.start_sec > current_time {
                    current_time += self.end_sec + 1.0;
                }

                let mut last_index = 0;
                let length = self.manager.current_list.lock().unwrap().len();

                if length > 0 {
                    last_index = length - 1;
                }

                let mut media = Media::new(length, "", false);
                media.begin = Some(current_time);
                media.duration = total_delta;
                media.out = total_delta;

                self.last_next_ad(&mut media);

                self.current_node = gen_source(&self.config, media, &self.manager, last_index);
            }

            return Some(self.current_node.clone());
        }

        if self.manager.current_index.load(Ordering::SeqCst)
            < self.manager.current_list.lock().unwrap().len()
        {
            // get next clip from current playlist
            let mut is_last = false;
            let index = self.manager.current_index.load(Ordering::SeqCst);
            let node_list = self.manager.current_list.lock().unwrap();
            let mut node = node_list[index].clone();
            let last_index = node_list.len() - 1;

            // Release the list lock before calling into timed_source.
            drop(node_list);

            if index == last_index {
                is_last = true
            }

            self.last_next_ad(&mut node);

            self.current_node =
                timed_source(node, &self.config, is_last, &self.manager, last_index);

            self.manager.current_index.fetch_add(1, Ordering::SeqCst);

            Some(self.current_node.clone())
        } else {
            let (_, total_delta) = get_delta(&self.config, &self.start_sec);

            if !self.config.playlist.infinit
                && self.last_json_path == self.json_playlist.path
                && total_delta.abs() > 1.0
            {
                // Playlist is to early finish,
                // and if we have to fill it with a placeholder.
                trace!("Total delta on list end: {total_delta}");

                self.fill_end(total_delta);

                return Some(self.current_node.clone());
            }

            // Get first clip from next playlist.
            let c_list = self.manager.current_list.lock().unwrap();
            let mut first_node = c_list[0].clone();

            drop(c_list);

            if self.config.playlist.infinit {
                self.recalculate_begin(false)
            }

            self.manager.current_index.store(0, Ordering::SeqCst);
            self.last_next_ad(&mut first_node);
            first_node.last_ad = self.last_node_ad;

            self.current_node = gen_source(&self.config, first_node, &self.manager, 0);
            self.manager.current_index.store(1, Ordering::SeqCst);

            Some(self.current_node.clone())
        }
    }
}
/// Prepare input clip:
///
/// - check begin and length from clip
/// - return clip only if we are in 24 hours time range
fn timed_source(
    node: Media,
    config: &PlayoutConfig,
    last: bool,
    manager: &ChannelManager,
    last_index: usize,
) -> Media {
    let id = config.general.channel_id;
    let time_shift = manager.channel.lock().unwrap().time_shift;
    let current_date = manager.current_date.lock().unwrap().clone();
    let last_date = manager.channel.lock().unwrap().last_date.clone();
    let (delta, total_delta) = get_delta(config, &node.begin.unwrap());
    let mut shifted_delta = delta;
    let mut new_node = node.clone();
    // Default to "skip"; only flip to true when the clip is playable.
    new_node.process = Some(false);

    trace!(
        "Node - begin: {} | source: {}",
        node.begin.unwrap(),
        node.source
    );
    trace!(
        "timed source is last: {last} | current_date: {current_date} | last_date: {last_date:?} | time_shift: {time_shift}"
    );

    // A playlist length in "HH:MM:SS" form means we run against a fixed
    // day length, so sync drift has to be checked.
    if config.playlist.length.contains(':') {
        if Some(current_date) == last_date && time_shift != 0.0 {
            shifted_delta = delta - time_shift;

            debug!(target: Target::file_mail(), channel = id; "Delta: <yellow>{shifted_delta:.3}</>, shifted: <yellow>{delta:.3}</>");
        } else {
            debug!(target: Target::file_mail(), channel = id; "Delta: <yellow>{shifted_delta:.3}</>");
        }

        if config.general.stop_threshold > 0.0
            && shifted_delta.abs() > config.general.stop_threshold
        {
            // Too far out of sync: drop the clip entirely (cmd = None).
            if manager.is_alive.load(Ordering::SeqCst) {
                error!(target: Target::file_mail(), channel = id; "Clip begin out of sync for <yellow>{delta:.3}</> seconds.");
            }

            new_node.cmd = None;

            return new_node;
        }
    }

    if (total_delta > node.out - node.seek && !last)
        || node.index.unwrap() < 2
        || !config.playlist.length.contains(':')
        || config.playlist.infinit
    {
        // when we are in the 24 hour range, get the clip
        new_node.process = Some(true);
        new_node = gen_source(config, node, manager, last_index);
    } else if total_delta <= 0.0 {
        info!(target: Target::file_mail(), channel = id; "Begin is over play time, skip: {}", node.source);
    } else if total_delta < node.duration - node.seek || last {
        new_node = handle_list_end(config, node, total_delta, manager, last_index);
    }

    new_node
}
/// Split a clip that both seeks and loops into two list entries: the current
/// node keeps the looped part, an inserted duplicate carries the remainder,
/// so loop and seek never apply to the same ffmpeg invocation.
fn duplicate_for_seek_and_loop(node: &mut Media, current_list: &Arc<Mutex<Vec<Media>>>) {
    let mut nodes = current_list.lock().unwrap();
    let index = node.index.unwrap_or_default();

    let mut node_duplicate = node.clone();
    node_duplicate.seek = 0.0;
    let orig_seek = node.seek;
    node.out = node.duration;

    if node.seek > node.duration {
        // Seek value wraps around the clip length.
        node.seek %= node.duration;

        node_duplicate.out = node_duplicate.out - orig_seek - (node.out - node.seek);
    } else {
        node_duplicate.out -= node_duplicate.duration;
    }

    if node.seek == node.out {
        node.seek = node_duplicate.seek;
        node.out = node_duplicate.out;
    } else if node_duplicate.out - node_duplicate.seek > 1.2 {
        // Only insert the duplicate when its remaining play time is
        // meaningful (> 1.2s); afterwards re-index the whole list.
        node_duplicate.begin =
            Some(node_duplicate.begin.unwrap_or_default() + (node.out - node.seek));

        nodes.insert(index + 1, node_duplicate);

        for (i, item) in nodes.iter_mut().enumerate() {
            item.index = Some(i);
        }
    }
}
/// Generate the source CMD, or when clip not exist, get a dummy.
pub fn gen_source(
config: &PlayoutConfig,
mut node: Media,
manager: &ChannelManager,
last_index: usize,
) -> Media {
let node_index = node.index.unwrap_or_default();
let mut duration = node.out - node.seek;
if duration < 1.0 {
warn!(
target: Target::file_mail(), channel = config.general.channel_id;
"Clip is less then 1 second long (<yellow>{duration:.3}</>), adjust length."
);
duration = 1.2;
if node.seek > 1.0 {
node.seek -= 1.2;
} else {
node.out = 1.2;
}
}
trace!("Clip new length: {duration}, duration: {}", node.duration);
if node.probe.is_none() && !node.source.is_empty() {
if let Err(e) = node.add_probe(true) {
trace!("{e:?}");
};
} else {
trace!("Node has a probe...")
}
// separate if condition, because of node.add_probe() in last condition
if node.probe.is_some() {
if node
.source
.rsplit_once('.')
.map(|(_, e)| e.to_lowercase())
.filter(|c| IMAGE_FORMAT.contains(&c.as_str()))
.is_some()
{
node.cmd = Some(loop_image(config, &node));
} else {
if node.seek > 0.0 && node.out > node.duration {
warn!(target: Target::file_mail(), channel = config.general.channel_id; "Clip loops and has seek value: duplicate clip to separate loop and seek.");
duplicate_for_seek_and_loop(&mut node, &manager.current_list);
}
node.cmd = Some(seek_and_length(config, &mut node));
}
} else {
trace!("clip index: {node_index} | last index: {last_index}");
// Last index is the index from the last item from the node list.
if node_index < last_index {
error!(target: Target::file_mail(), channel = config.general.channel_id; "Source not found: <b><magenta>{}</></b>", node.source);
}
let mut fillers = vec![];
match manager.filler_list.try_lock() {
Ok(list) => fillers = list.to_vec(),
Err(e) => {
error!(target: Target::file_mail(), channel = config.general.channel_id; "Lock filler list error: {e}")
}
}
// Set list_init to true, to stay in sync.
manager.list_init.store(true, Ordering::SeqCst);
if config.storage.filler_path.is_dir() && !fillers.is_empty() {
let index = manager.filler_index.fetch_add(1, Ordering::SeqCst);
let mut filler_media = fillers[index].clone();
trace!("take filler: {}", filler_media.source);
if index == fillers.len() - 1 {
// reset index for next round
manager.filler_index.store(0, Ordering::SeqCst)
}
if filler_media.probe.is_none() {
if let Err(e) = filler_media.add_probe(false) {
error!(target: Target::file_mail(), channel = config.general.channel_id; "{e:?}");
};
}
if filler_media.duration > duration {
filler_media.out = duration;
}
node.source = filler_media.source;
node.seek = 0.0;
node.out = filler_media.out;
node.duration = filler_media.duration;
node.cmd = Some(loop_filler(config, &node));
node.probe = filler_media.probe;
} else {
match MediaProbe::new(&config.storage.filler_path.to_string_lossy()) {
Ok(probe) => {
if config
.storage
.filler_path
.to_string_lossy()
.to_string()
.rsplit_once('.')
.map(|(_, e)| e.to_lowercase())
.filter(|c| IMAGE_FORMAT.contains(&c.as_str()))
.is_some()
{
node.source = config
.storage
.filler_path
.clone()
.to_string_lossy()
.to_string();
node.cmd = Some(loop_image(config, &node));
node.probe = Some(probe);
} else if let Some(filler_duration) = probe
.clone()
.format
.duration
.and_then(|d| d.parse::<f64>().ok())
{
// Create placeholder from config filler.
let mut filler_out = filler_duration;
if filler_duration > duration {
filler_out = duration;
}
node.source = config
.storage
.filler_path
.clone()
.to_string_lossy()
.to_string();
node.seek = 0.0;
node.out = filler_out;
node.duration = filler_duration;
node.cmd = Some(loop_filler(config, &node));
node.probe = Some(probe);
} else {
// Create colored placeholder.
let (source, cmd) = gen_dummy(config, duration);
node.source = source;
node.cmd = Some(cmd);
}
}
Err(e) => {
// Create colored placeholder.
error!(target: Target::file_mail(), channel = config.general.channel_id; "Filler error: {e}");
let mut dummy_duration = 60.0;
if dummy_duration > duration {
dummy_duration = duration;
}
let (source, cmd) = gen_dummy(config, dummy_duration);
node.seek = 0.0;
node.out = dummy_duration;
node.duration = dummy_duration;
node.source = source;
node.cmd = Some(cmd);
}
}
}
warn!(
target: Target::file_mail(), channel = config.general.channel_id;
"Generate filler with <yellow>{:.2}</> seconds length!",
node.out
);
}
node.add_filter(config, &manager.filter_chain.clone());
trace!(
"return gen_source: {}, seek: {}, out: {}",
node.source,
node.seek,
node.out,
);
node
}
/// Handle init clip, but this clip can be the last one in playlist,
/// this we have to figure out and calculate the right length.
fn handle_list_init(
    config: &PlayoutConfig,
    mut node: Media,
    manager: &ChannelManager,
    last_index: usize,
) -> Media {
    debug!(target: Target::file_mail(), channel = config.general.channel_id; "Playlist init");

    let (_, total_delta) = get_delta(config, &node.begin.unwrap());

    // Clamp the clip end so playback never runs past the day boundary.
    let remaining = node.out - node.seek;

    if !config.playlist.infinit && remaining > total_delta {
        node.out = node.seek + total_delta;
    }

    gen_source(config, node, manager, last_index)
}
/// when we come to last clip in playlist,
/// or when we reached total playtime,
/// we end up here
fn handle_list_end(
    config: &PlayoutConfig,
    mut node: Media,
    total_delta: f64,
    manager: &ChannelManager,
    last_index: usize,
) -> Media {
    debug!(target: Target::file_mail(), channel = config.general.channel_id; "Last clip from day");

    // Target end point on the clip's own timeline.
    let mut out = node.seek + total_delta;

    if node.seek <= 0.0 {
        if node.duration > total_delta {
            warn!(target: Target::file_mail(), channel = config.general.channel_id; "Adjust clip duration to: <yellow>{total_delta:.2}</>");
        }

        out = total_delta;
    }

    // out can't be longer then duration
    if node.duration < out {
        out = node.duration
    }

    let fits = node.duration > total_delta
        && total_delta > 1.0
        && node.duration - node.seek >= total_delta;

    if fits {
        node.out = out;
    } else {
        warn!(target: Target::file_mail(), channel = config.general.channel_id; "Playlist is not long enough: <yellow>{total_delta:.2}</> seconds needed");
    }

    node.process = Some(true);

    gen_source(config, node, manager, last_index)
}

View File

@ -1,5 +0,0 @@
// Player sub-modules: process control, ffmpeg filter building,
// input sources, output writers, and shared helpers.
pub mod controller;
pub mod filter;
pub mod input;
pub mod output;
pub mod utils;

View File

@ -1,86 +0,0 @@
use std::process::{self, Command, Stdio};
use log::*;
use crate::player::filter::v_drawtext;
use crate::utils::{config::PlayoutConfig, logging::Target};
use crate::vec_strings;
/// Desktop Output
///
/// Instead of streaming, we run a ffplay instance and play on desktop.
pub fn output(config: &PlayoutConfig, log_format: &str) -> process::Child {
    let mut enc_filter: Vec<String> = vec![];

    let mut enc_cmd = vec_strings!["-hide_banner", "-nostats", "-v", log_format];

    // Optional user-defined input arguments come first.
    if let Some(encoder_input_cmd) = &config.advanced.encoder.input_cmd {
        enc_cmd.append(&mut encoder_input_cmd.clone());
    }

    enc_cmd.append(&mut vec_strings![
        "-autoexit",
        "-i",
        "pipe:0",
        "-window_title",
        "ffplayout"
    ]);

    if let Some(mut cmd) = config.output.output_cmd.clone() {
        // ffplay understands no encoder/mapping options; only pass the
        // configured output command through when it contains none of them.
        if !cmd.iter().any(|i| {
            [
                "-c",
                "-c:v",
                "-c:v:0",
                "-b:v",
                "-b:v:0",
                "-vcodec",
                "-c:a",
                "-acodec",
                "-crf",
                "-map",
                "-filter_complex",
            ]
            .contains(&i.as_str())
        }) {
            enc_cmd.append(&mut cmd);
        } else {
            warn!(target: Target::file_mail(), channel = config.general.channel_id; "ffplay doesn't support given output parameters, they will be skipped!");
        }
    }

    if config.text.add_text && !config.text.text_from_filename && !config.processing.audio_only {
        if let Some(socket) = config.text.zmq_stream_socket.clone() {
            debug!(target: Target::file_mail(), channel = config.general.channel_id;
                "Using drawtext filter, listening on address: <yellow>{}</>",
                socket
            );

            // Leading "null," keeps the filter graph valid when drawtext
            // is the only video filter.
            let mut filter: String = "null,".to_string();
            filter.push_str(v_drawtext::filter_node(config, None, &None).as_str());
            enc_filter = vec!["-vf".to_string(), filter];
        }
    }

    enc_cmd.append(&mut enc_filter);

    debug!(target: Target::file_mail(), channel = config.general.channel_id;
        "Encoder CMD: <bright-blue>\"ffplay {}\"</>",
        enc_cmd.join(" ")
    );

    let enc_proc = match Command::new("ffplay")
        .args(enc_cmd)
        .stdin(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()
    {
        Err(e) => {
            error!(target: Target::file_mail(), channel = config.general.channel_id; "couldn't spawn encoder process: {e}");
            panic!("couldn't spawn encoder process: {e}")
        }
        Ok(proc) => proc,
    };

    enc_proc
}

View File

@ -1,319 +0,0 @@
/*
This module write the files compression directly to a hls (m3u8) playlist,
without pre- and post-processing.
Example config:
out:
output_param: >-
...
-flags +cgop
-f hls
-hls_time 6
-hls_list_size 600
-hls_flags append_list+delete_segments+omit_endlist+program_date_time
-hls_segment_filename /var/www/html/live/stream-%d.ts /var/www/html/live/stream.m3u8
*/
use std::{
io::{BufRead, BufReader},
process::{Command, Stdio},
sync::atomic::Ordering,
thread::{self, sleep},
time::{Duration, SystemTime},
};
use log::*;
use crate::utils::{logging::log_line, task_runner};
use crate::vec_strings;
use crate::{
player::{
controller::{ChannelManager, ProcessUnit::*},
input::source_generator,
utils::{
get_delta, is_free_tcp_port, prepare_output_cmd, sec_to_time, stderr_reader,
valid_stream, Media,
},
},
utils::{errors::ProcessError, logging::Target},
};
/// Ingest Server for HLS
///
/// Runs an ffmpeg listener that accepts a live stream; while a stream is
/// connected, the playlist decoder is stopped and the ingest takes over.
/// The server is respawned in a loop until the channel terminates.
fn ingest_to_hls_server(manager: ChannelManager) -> Result<(), ProcessError> {
    let config = manager.config.lock().unwrap();
    let id = config.general.channel_id;
    let playlist_init = manager.list_init.clone();
    let chain = manager.filter_chain.clone();

    let mut error_count = 0;

    let mut server_prefix = vec_strings!["-hide_banner", "-nostats", "-v", "level+info"];
    let stream_input = config.ingest.input_cmd.clone().unwrap();
    let mut dummy_media = Media::new(0, "Live Stream", false);
    dummy_media.unit = Ingest;
    let is_terminated = manager.is_terminated.clone();
    let ingest_is_running = manager.ingest_is_running.clone();

    if let Some(ingest_input_cmd) = &config.advanced.ingest.input_cmd {
        server_prefix.append(&mut ingest_input_cmd.clone());
    }

    server_prefix.append(&mut stream_input.clone());

    if config.processing.vtt_enable {
        let vtt_dummy = config
            .channel
            .storage
            .join(config.processing.vtt_dummy.clone().unwrap_or_default());

        if vtt_dummy.is_file() {
            server_prefix.append(&mut vec_strings!["-i", vtt_dummy.to_string_lossy()]);
        }
    }

    let mut is_running;

    // Refuse to start when the ingest port is already taken.
    if let Some(url) = stream_input.iter().find(|s| s.contains("://")) {
        if !is_free_tcp_port(id, url) {
            manager.channel.lock().unwrap().active = false;
            manager.stop_all();
        } else {
            info!(target: Target::file_mail(), channel = id; "Start ingest server, listening on: <b><magenta>{url}</></b>");
        }
    };

    // Release the config lock before entering the restart loop.
    drop(config);

    loop {
        let config = manager.config.lock().unwrap().clone();
        dummy_media.add_filter(&config, &chain);
        let server_cmd = prepare_output_cmd(&config, server_prefix.clone(), &dummy_media.filter);
        let timer = SystemTime::now();

        debug!(target: Target::file_mail(), channel = id;
            "Server CMD: <bright-blue>\"ffmpeg {}\"</>",
            server_cmd.join(" ")
        );

        let proc_ctl = manager.clone();
        let mut server_proc = match Command::new("ffmpeg")
            .args(server_cmd.clone())
            .stderr(Stdio::piped())
            .spawn()
        {
            Err(e) => {
                error!(target: Target::file_mail(), channel = id; "couldn't spawn ingest server: {e}");
                panic!("couldn't spawn ingest server: {e}");
            }
            Ok(proc) => proc,
        };

        let server_err = BufReader::new(server_proc.stderr.take().unwrap());
        *manager.ingest.lock().unwrap() = Some(server_proc);
        is_running = false;

        for line in server_err.lines() {
            let line = line?;

            // Reject streams that don't match the expected stream name.
            if line.contains("rtmp") && line.contains("Unexpected stream") && !valid_stream(&line) {
                warn!(target: Target::file_mail(), channel = id; "Unexpected ingest stream: {line}");

                if let Err(e) = proc_ctl.stop(Ingest) {
                    error!(target: Target::file_mail(), channel = id; "{e}");
                };
            }

            // First "Input #0" line marks a connected stream: go live.
            if !is_running && line.contains("Input #0") {
                ingest_is_running.store(true, Ordering::SeqCst);
                playlist_init.store(true, Ordering::SeqCst);
                is_running = true;

                info!(target: Target::file_mail(), channel = id; "Switch from {} to live ingest", config.processing.mode);

                if let Err(e) = manager.stop(Decoder) {
                    error!(target: Target::file_mail(), channel = id; "{e}");
                }
            }

            if ingest_is_running.load(Ordering::SeqCst) {
                log_line(&line, &config.logging.ingest_level);
            } else {
                log_line(&line, &config.logging.ffmpeg_level);
            }
        }

        if ingest_is_running.load(Ordering::SeqCst) {
            info!(target: Target::file_mail(), channel = id; "Switch from live ingest to {}", config.processing.mode);
        }

        ingest_is_running.store(false, Ordering::SeqCst);

        if let Err(e) = manager.wait(Ingest) {
            error!(target: Target::file_mail(), channel = id; "{e}")
        }

        if is_terminated.load(Ordering::SeqCst) {
            break;
        }

        // Instant respawns (< 300 ms) indicate a fatal problem; give up
        // and deactivate the channel after 10 in a row.
        if let Ok(elapsed) = timer.elapsed() {
            if elapsed.as_millis() < 300 {
                error_count += 1;

                if error_count > 10 {
                    error!(target: Target::file_mail(), channel = id; "Reach fatal error count in ingest, terminate channel!");
                    manager.channel.lock().unwrap().active = false;
                    manager.stop_all();
                    break;
                }
            } else {
                error_count = 0;
            }
        }
    }

    Ok(())
}
/// HLS Writer
///
/// Write with single ffmpeg instance directly to a HLS playlist.
pub fn write_hls(manager: ChannelManager) -> Result<(), ProcessError> {
    let config = manager.config.lock()?.clone();
    let id = config.general.channel_id;
    let current_media = manager.current_media.clone();
    let is_terminated = manager.is_terminated.clone();

    let ff_log_format = format!("level+{}", config.logging.ffmpeg_level.to_lowercase());
    let channel_mgr_2 = manager.clone();
    let ingest_is_running = manager.ingest_is_running.clone();

    let get_source = source_generator(manager.clone());

    // spawn a thread for ffmpeg ingest server and create a channel for package sending
    if config.ingest.enable {
        thread::spawn(move || ingest_to_hls_server(channel_mgr_2));
    }

    let mut error_count = 0;

    for node in get_source {
        *current_media.lock().unwrap() = Some(node.clone());
        let ignore = config.logging.ignore_lines.clone();
        let timer = SystemTime::now();

        if is_terminated.load(Ordering::SeqCst) {
            break;
        }

        let mut cmd = match &node.cmd {
            Some(cmd) => cmd.clone(),
            None => break,
        };

        // process == false: node is valid, but nothing to play -> skip it.
        if !node.process.unwrap() {
            continue;
        }

        info!(target: Target::file_mail(), channel = id;
            "Play for <yellow>{}</>: <b><magenta>{}</></b>",
            sec_to_time(node.out - node.seek),
            node.source
        );

        if config.task.enable {
            if config.task.path.is_file() {
                let channel_mgr_3 = manager.clone();

                thread::spawn(move || task_runner::run(channel_mgr_3));
            } else {
                error!(target: Target::file_mail(), channel = id;
                    "<bright-blue>{:?}</> executable not exists!",
                    config.task.path
                );
            }
        }

        let mut dec_prefix = vec_strings!["-hide_banner", "-nostats", "-v", &ff_log_format];

        if let Some(decoder_input_cmd) = &config.advanced.decoder.input_cmd {
            dec_prefix.append(&mut decoder_input_cmd.clone());
        }

        // Slow the reader down slightly when we are ahead of schedule,
        // so the playout stays in sync with real time.
        let mut read_rate = 1.0;

        if let Some(begin) = &node.begin {
            let (delta, _) = get_delta(&config, begin);
            let duration = node.out - node.seek;
            let speed = duration / (duration + delta);

            if node.seek == 0.0
                && speed > 0.0
                && speed < 1.3
                && delta < config.general.stop_threshold
            {
                read_rate = speed;
            }
        }

        dec_prefix.append(&mut vec_strings!["-readrate", read_rate]);

        dec_prefix.append(&mut cmd);
        let dec_cmd = prepare_output_cmd(&config, dec_prefix, &node.filter);

        debug!(target: Target::file_mail(), channel = id;
            "HLS writer CMD: <bright-blue>\"ffmpeg {}\"</>",
            dec_cmd.join(" ")
        );

        let mut dec_proc = match Command::new("ffmpeg")
            .args(dec_cmd)
            .stderr(Stdio::piped())
            .spawn()
        {
            Ok(proc) => proc,
            Err(e) => {
                error!(target: Target::file_mail(), channel = id; "couldn't spawn ffmpeg process: {e}");
                panic!("couldn't spawn ffmpeg process: {e}")
            }
        };

        let dec_err = BufReader::new(dec_proc.stderr.take().unwrap());
        *manager.decoder.lock().unwrap() = Some(dec_proc);

        if let Err(e) = stderr_reader(dec_err, ignore, Decoder, manager.clone()) {
            error!(target: Target::file_mail(), channel = id; "{e:?}")
        };

        if let Err(e) = manager.wait(Decoder) {
            error!(target: Target::file_mail(), channel = id; "{e}");
        }

        // While a live ingest is active, the playlist loop idles here.
        while ingest_is_running.load(Ordering::SeqCst) {
            sleep(Duration::from_secs(1));
        }

        // Instant decoder exits (< 300 ms) indicate a fatal problem;
        // give up after 10 in a row.
        if let Ok(elapsed) = timer.elapsed() {
            if elapsed.as_millis() < 300 {
                error_count += 1;

                if error_count > 10 {
                    error!(target: Target::file_mail(), channel = id; "Reach fatal error count, terminate channel!");
                    break;
                }
            } else {
                error_count = 0;
            }
        }
    }

    sleep(Duration::from_secs(1));

    manager.stop_all();

    Ok(())
}

View File

@ -1,278 +0,0 @@
use std::{
io::{prelude::*, BufReader, BufWriter, Read},
process::{Command, Stdio},
sync::{atomic::Ordering, mpsc::sync_channel},
thread::{self, sleep},
time::{Duration, SystemTime},
};
use log::*;
mod desktop;
mod hls;
mod null;
mod stream;
pub use hls::write_hls;
use crate::player::{
controller::{ChannelManager, ProcessUnit::*},
input::{ingest_server, source_generator},
utils::{sec_to_time, stderr_reader},
};
use crate::utils::{config::OutputMode::*, errors::ProcessError, logging::Target, task_runner};
use crate::vec_strings;
/// Player
///
/// Here we create the input file loop, from playlist, or folder source.
/// Then we read the stdout from the reader ffmpeg instance
/// and write it to the stdin from the streamer ffmpeg instance.
/// If it is configured we also fire up a ffmpeg ingest server instance,
/// for getting live feeds.
/// When a live ingest arrive, it stops the current playing and switch to the live source.
/// When ingest stops, it switch back to playlist/folder mode.
pub fn player(manager: ChannelManager) -> Result<(), ProcessError> {
let config = manager.config.lock()?.clone();
let id = config.general.channel_id;
let config_clone = config.clone();
let ff_log_format = format!("level+{}", config.logging.ffmpeg_level.to_lowercase());
let ignore_enc = config.logging.ignore_lines.clone();
let mut buffer = [0; 65088];
let mut live_on = false;
let playlist_init = manager.list_init.clone();
let is_terminated = manager.is_terminated.clone();
let ingest_is_running = manager.ingest_is_running.clone();
// get source iterator
let node_sources = source_generator(manager.clone());
// get ffmpeg output instance
let mut enc_proc = match config.output.mode {
Desktop => desktop::output(&config, &ff_log_format),
Null => null::output(&config, &ff_log_format),
Stream => stream::output(&config, &ff_log_format),
_ => panic!("Output mode doesn't exists!"),
};
let mut enc_writer = BufWriter::new(enc_proc.stdin.take().unwrap());
let enc_err = BufReader::new(enc_proc.stderr.take().unwrap());
*manager.encoder.lock().unwrap() = Some(enc_proc);
let enc_p_ctl = manager.clone();
// spawn a thread to log ffmpeg output error messages
let error_encoder_thread =
thread::spawn(move || stderr_reader(enc_err, ignore_enc, Encoder, enc_p_ctl));
let channel_mgr_2 = manager.clone();
let mut ingest_receiver = None;
// spawn a thread for ffmpeg ingest server and create a channel for package sending
if config.ingest.enable {
let (ingest_sender, rx) = sync_channel(96);
ingest_receiver = Some(rx);
thread::spawn(move || ingest_server(config_clone, ingest_sender, channel_mgr_2));
}
drop(config);
let mut error_count = 0;
'source_iter: for node in node_sources {
let config = manager.config.lock()?.clone();
*manager.current_media.lock().unwrap() = Some(node.clone());
let ignore_dec = config.logging.ignore_lines.clone();
let timer = SystemTime::now();
if is_terminated.load(Ordering::SeqCst) {
debug!(target: Target::file_mail(), channel = id; "Playout is terminated, break out from source loop");
break;
}
trace!("Decoder CMD: {:?}", node.cmd);
let mut cmd = match &node.cmd {
Some(cmd) => cmd.clone(),
None => break,
};
if !node.process.unwrap() {
// process true/false differs from node.cmd = None in that way,
// that source is valid but to show for playing,
// so better skip it and jump to the next one.
continue;
}
let c_index = if cfg!(debug_assertions) {
format!(
" ({}/{})",
node.index.unwrap() + 1,
manager.current_list.lock().unwrap().len()
)
} else {
String::new()
};
info!(target: Target::file_mail(), channel = id;
"Play for <yellow>{}</>{c_index}: <b><magenta>{} {}</></b>",
sec_to_time(node.out - node.seek),
node.source,
node.audio
);
if config.task.enable {
if config.task.path.is_file() {
let channel_mgr_3 = manager.clone();
thread::spawn(move || task_runner::run(channel_mgr_3));
} else {
error!(target: Target::file_mail(), channel = id;
"<bright-blue>{:?}</> executable not exists!",
config.task.path
);
}
}
let mut dec_cmd = vec_strings!["-hide_banner", "-nostats", "-v", &ff_log_format];
if let Some(decoder_input_cmd) = &config.advanced.decoder.input_cmd {
dec_cmd.append(&mut decoder_input_cmd.clone());
}
dec_cmd.append(&mut cmd);
if let Some(mut filter) = node.filter {
dec_cmd.append(&mut filter.cmd());
dec_cmd.append(&mut filter.map());
}
if config.processing.vtt_enable && dec_cmd.iter().any(|s| s.ends_with(".vtt")) {
let i = dec_cmd
.iter()
.filter(|&n| n == "-i")
.count()
.saturating_sub(1);
dec_cmd.append(&mut vec_strings!("-map", format!("{i}:s"), "-c:s", "copy"));
}
if let Some(mut cmd) = config.processing.cmd.clone() {
dec_cmd.append(&mut cmd);
}
debug!(target: Target::file_mail(), channel = id;
"Decoder CMD: <bright-blue>\"ffmpeg {}\"</>",
dec_cmd.join(" ")
);
// create ffmpeg decoder instance, for reading the input files
let mut dec_proc = match Command::new("ffmpeg")
.args(dec_cmd)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
{
Ok(proc) => proc,
Err(e) => {
error!(target: Target::file_mail(), channel = id; "couldn't spawn decoder process: {e}");
panic!("couldn't spawn decoder process: {e}")
}
};
let mut dec_reader = BufReader::new(dec_proc.stdout.take().unwrap());
let dec_err = BufReader::new(dec_proc.stderr.take().unwrap());
*manager.clone().decoder.lock().unwrap() = Some(dec_proc);
let channel_mgr_c = manager.clone();
let error_decoder_thread =
thread::spawn(move || stderr_reader(dec_err, ignore_dec, Decoder, channel_mgr_c));
loop {
// when server is running, read from it
if ingest_is_running.load(Ordering::SeqCst) {
if !live_on {
info!(target: Target::file_mail(), channel = id; "Switch from {} to live ingest", config.processing.mode);
if let Err(e) = manager.stop(Decoder) {
error!(target: Target::file_mail(), channel = id; "{e}")
}
live_on = true;
playlist_init.store(true, Ordering::SeqCst);
}
for rx in ingest_receiver.as_ref().unwrap().try_iter() {
if let Err(e) = enc_writer.write(&rx.1[..rx.0]) {
error!(target: Target::file_mail(), channel = id; "Error from Ingest: {:?}", e);
break 'source_iter;
};
}
// read from decoder instance
} else {
if live_on {
info!(target: Target::file_mail(), channel = id; "Switch from live ingest to {}", config.processing.mode);
live_on = false;
break;
}
let dec_bytes_len = match dec_reader.read(&mut buffer[..]) {
Ok(length) => length,
Err(e) => {
error!(target: Target::file_mail(), channel = id; "Reading error from decoder: {e:?}");
break 'source_iter;
}
};
if dec_bytes_len > 0 {
if let Err(e) = enc_writer.write(&buffer[..dec_bytes_len]) {
error!(target: Target::file_mail(), channel = id; "Encoder write error: {}", e.kind());
break 'source_iter;
};
} else {
break;
}
}
}
if let Err(e) = manager.wait(Decoder) {
error!(target: Target::file_mail(), channel = id; "{e}")
}
if let Err(e) = error_decoder_thread.join() {
error!(target: Target::file_mail(), channel = id; "{e:?}");
};
if let Ok(elapsed) = timer.elapsed() {
if elapsed.as_millis() < 300 {
error_count += 1;
if error_count > 10 {
error!(target: Target::file_mail(), channel = id; "Reach fatal error count, terminate channel!");
break;
}
} else {
error_count = 0;
}
}
}
trace!("Out of source loop");
sleep(Duration::from_secs(1));
manager.stop_all();
if let Err(e) = error_encoder_thread.join() {
error!(target: Target::file_mail(), channel = id; "{e:?}");
};
Ok(())
}

View File

@ -1,50 +0,0 @@
use std::process::{self, Command, Stdio};
use log::*;
use crate::player::{
controller::ProcessUnit::*,
utils::{prepare_output_cmd, Media},
};
use crate::utils::{config::PlayoutConfig, logging::Target};
use crate::vec_strings;
/// Desktop Output
///
/// Feeds the piped input into an ffmpeg process configured for local
/// desktop playback instead of streaming it to a remote target.
pub fn output(config: &PlayoutConfig, log_format: &str) -> process::Child {
    let id = config.general.channel_id;

    // A dummy media object is only needed to derive the encoder filter chain.
    let mut media = Media::new(0, "", false);
    media.unit = Encoder;
    media.add_filter(config, &None);

    // Global ffmpeg flags, optionally extended by user-defined input parameters.
    let mut enc_prefix = vec_strings!["-hide_banner", "-nostats", "-v", log_format];

    if let Some(input_cmd) = &config.advanced.encoder.input_cmd {
        enc_prefix.append(&mut input_cmd.clone());
    }

    enc_prefix.append(&mut vec_strings!["-re", "-i", "pipe:0"]);

    let enc_cmd = prepare_output_cmd(config, enc_prefix, &media.filter);

    debug!(target: Target::file_mail(), channel = id;
        "Encoder CMD: <bright-blue>\"ffmpeg {}\"</>",
        enc_cmd.join(" ")
    );

    // Spawn ffmpeg with piped stdin (fed by the decoder) and piped stderr
    // (consumed by the log reader thread).
    match Command::new("ffmpeg")
        .args(enc_cmd)
        .stdin(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()
    {
        Ok(proc) => proc,
        Err(e) => {
            error!(target: Target::file_mail(), channel = id; "couldn't spawn encoder process: {e}");
            panic!("couldn't spawn encoder process: {e}")
        }
    }
}

View File

@ -1,50 +0,0 @@
use std::process::{self, Command, Stdio};
use log::*;
use crate::player::{
controller::ProcessUnit::*,
utils::{prepare_output_cmd, Media},
};
use crate::utils::{config::PlayoutConfig, logging::Target};
use crate::vec_strings;
/// Streaming Output
///
/// Prepare the ffmpeg command for streaming output
pub fn output(config: &PlayoutConfig, log_format: &str) -> process::Child {
    let id = config.general.channel_id;
    let mut media = Media::new(0, "", false);
    media.unit = Encoder;
    media.add_filter(config, &None);

    // Prefix: global flags, optional user input parameters, stdin pipe as source.
    let mut enc_prefix = vec_strings!["-hide_banner", "-nostats", "-v", log_format];

    if let Some(input_cmd) = &config.advanced.encoder.input_cmd {
        enc_prefix.append(&mut input_cmd.clone());
    }

    enc_prefix.append(&mut vec_strings!["-re", "-i", "pipe:0"]);

    let enc_cmd = prepare_output_cmd(config, enc_prefix, &media.filter);

    debug!(target: Target::file_mail(), channel = id;
        "Encoder CMD: <bright-blue>\"ffmpeg {}\"</>",
        enc_cmd.join(" ")
    );

    let spawn_result = Command::new("ffmpeg")
        .args(enc_cmd)
        .stdin(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn();

    match spawn_result {
        Ok(proc) => proc,
        Err(e) => {
            error!(target: Target::file_mail(), channel = id; "couldn't spawn encoder process: {e}");
            panic!("couldn't spawn encoder process: {e}")
        }
    }
}

View File

@ -1,231 +0,0 @@
use std::sync::{
atomic::Ordering,
{Arc, Mutex},
};
use lexical_sort::natural_lexical_cmp;
use log::*;
use rand::{seq::SliceRandom, thread_rng};
use walkdir::WalkDir;
use crate::player::{
controller::ChannelManager,
utils::{include_file_extension, time_in_seconds, Media, PlayoutConfig},
};
use crate::utils::logging::Target;
/// Folder Sources
///
/// Like playlist source, we create here a folder list for iterate over it.
#[derive(Debug, Clone)]
pub struct FolderSource {
    // Shared channel state; owns the media list this source iterates over.
    manager: ChannelManager,
    // The node most recently handed out by the iterator.
    current_node: Media,
}
impl FolderSource {
    /// Build a folder source by scanning the configured storage paths.
    ///
    /// Collects every playable file (filtered by extension), shuffles or
    /// sorts the result, numbers the items and stores the list in the
    /// channel manager.
    pub fn new(config: &PlayoutConfig, manager: ChannelManager) -> Self {
        let id = config.general.channel_id;
        let mut path_list = vec![];
        let mut media_list = vec![];

        debug!(target: Target::file_mail(), channel = id;
            "generate: {:?}, paths: {:?}",
            config.general.generate, config.storage.paths
        );

        // With an active generator and explicit paths, scan those paths;
        // otherwise fall back to the channel's storage root.
        if config.general.generate.is_some() && !config.storage.paths.is_empty() {
            path_list.extend(&config.storage.paths);
        } else {
            path_list.push(&config.channel.storage);
        }

        for path in &path_list {
            if !path.is_dir() {
                error!(target: Target::file_mail(), channel = id; "Path not exists: <b><magenta>{path:?}</></b>");
            }

            for entry in WalkDir::new(path)
                .into_iter()
                .flat_map(|e| e.ok())
                .filter(|f| f.path().is_file())
                .filter(|f| include_file_extension(config, f.path()))
            {
                let media = Media::new(0, &entry.path().to_string_lossy(), false);
                media_list.push(media);
            }
        }

        if media_list.is_empty() {
            error!(target: Target::file_mail(), channel = id;
                "no playable files found under: <b><magenta>{:?}</></b>",
                path_list
            );
        }

        if config.storage.shuffle {
            info!(target: Target::file_mail(), channel = id; "Shuffle files");
            let mut rng = thread_rng();
            media_list.shuffle(&mut rng);
        } else {
            media_list.sort_by(|d1, d2| d1.source.cmp(&d2.source));
        }

        // Number the items so the player can track its position in the list.
        for (index, item) in media_list.iter_mut().enumerate() {
            item.index = Some(index);
        }

        *manager.current_list.lock().unwrap() = media_list;

        Self {
            manager,
            current_node: Media::new(0, "", false),
        }
    }

    /// Create a folder source from an already prepared media list.
    pub fn from_list(manager: &ChannelManager, list: Vec<Media>) -> Self {
        *manager.current_list.lock().unwrap() = list;

        Self {
            manager: manager.clone(),
            current_node: Media::new(0, "", false),
        }
    }

    /// Shuffle the shared list in place and renumber the items.
    fn shuffle(&mut self) {
        let mut rng = thread_rng();
        let mut nodes = self.manager.current_list.lock().unwrap();

        nodes.shuffle(&mut rng);

        for (index, item) in nodes.iter_mut().enumerate() {
            item.index = Some(index);
        }
    }

    /// Sort the shared list by source path and renumber the items.
    fn sort(&mut self) {
        let mut nodes = self.manager.current_list.lock().unwrap();

        nodes.sort_by(|d1, d2| d1.source.cmp(&d2.source));

        for (index, item) in nodes.iter_mut().enumerate() {
            item.index = Some(index);
        }
    }
}
/// Create iterator for folder source
impl Iterator for FolderSource {
    type Item = Media;

    fn next(&mut self) -> Option<Self::Item> {
        let config = self.manager.config.lock().unwrap().clone();
        // NOTE(review): sibling code paths log with `config.general.channel_id`;
        // confirm `config.general.id` is really intended here.
        let id = config.general.id;

        if self.manager.current_index.load(Ordering::SeqCst)
            < self.manager.current_list.lock().unwrap().len()
        {
            // Still inside the list: clone the next node, probe and filter it,
            // stamp its begin time and advance the shared index.
            let i = self.manager.current_index.load(Ordering::SeqCst);
            self.current_node = self.manager.current_list.lock().unwrap()[i].clone();
            let _ = self.current_node.add_probe(false).ok();
            self.current_node
                .add_filter(&config, &self.manager.filter_chain);
            self.current_node.begin = Some(time_in_seconds());

            self.manager.current_index.fetch_add(1, Ordering::SeqCst);

            Some(self.current_node.clone())
        } else {
            // End of list reached: reshuffle or resort, then restart from the top.
            if config.storage.shuffle {
                if config.general.generate.is_none() {
                    info!(target: Target::file_mail(), channel = id; "Shuffle files");
                }

                self.shuffle();
            } else {
                if config.general.generate.is_none() {
                    info!(target: Target::file_mail(), channel = id; "Sort files");
                }

                self.sort();
            }

            // Empty list means the iterator is exhausted for good.
            self.current_node = match self.manager.current_list.lock().unwrap().first() {
                Some(m) => m.clone(),
                None => return None,
            };
            let _ = self.current_node.add_probe(false).ok();
            self.current_node
                .add_filter(&config, &self.manager.filter_chain);
            self.current_node.begin = Some(time_in_seconds());

            // Item 0 was just handed out, so the next read starts at 1.
            self.manager.current_index.store(1, Ordering::SeqCst);

            Some(self.current_node.clone())
        }
    }
}
/// Build the list of filler clips from the configured filler path.
///
/// A directory is scanned recursively for playable files; a single file
/// becomes a one-element list. Probing only happens when no shared filler
/// store is supplied; when one is supplied, the result is copied into it.
pub fn fill_filler_list(
    config: &PlayoutConfig,
    fillers: Option<Arc<Mutex<Vec<Media>>>>,
) -> Vec<Media> {
    let id = config.general.channel_id;
    let mut filler_list = vec![];
    let filler_path = &config.storage.filler_path;

    if filler_path.is_dir() {
        // Collect every playable file below the filler directory.
        for (index, entry) in WalkDir::new(&config.storage.filler_path)
            .into_iter()
            .flat_map(|e| e.ok())
            .filter(|f| f.path().is_file())
            .filter(|f| include_file_extension(config, f.path()))
            .enumerate()
        {
            let mut item = Media::new(index, &entry.path().to_string_lossy(), false);

            if fillers.is_none() {
                if let Err(e) = item.add_probe(false) {
                    error!(target: Target::file_mail(), channel = id; "{e:?}");
                };
            }

            filler_list.push(item);
        }

        // Randomize or order the fillers, depending on configuration.
        if config.storage.shuffle {
            let mut rng = thread_rng();

            filler_list.shuffle(&mut rng);
        } else {
            filler_list.sort_by(|d1, d2| natural_lexical_cmp(&d1.source, &d2.source));
        }

        for (index, item) in filler_list.iter_mut().enumerate() {
            item.index = Some(index);
        }

        if let Some(f) = fillers.as_ref() {
            f.lock().unwrap().clone_from(&filler_list);
        }
    } else if filler_path.is_file() {
        // A single filler file yields a one-element list.
        let mut item = Media::new(0, &config.storage.filler_path.to_string_lossy(), false);

        if fillers.is_none() {
            if let Err(e) = item.add_probe(false) {
                error!(target: Target::file_mail(), channel = id; "{e:?}");
            };
        }

        filler_list.push(item);

        if let Some(f) = fillers.as_ref() {
            f.lock().unwrap().clone_from(&filler_list);
        }
    }

    filler_list
}

View File

@ -1,82 +0,0 @@
/// Import text/m3u file and create a playlist out of it
use std::{
//error::Error,
fs::{create_dir_all, File},
io::{BufRead, BufReader, Error, ErrorKind},
path::Path,
};
use crate::player::utils::{
json_reader, json_serializer::JsonPlaylist, json_writer, Media, PlayoutConfig,
};
/// Import a plain-text / m3u file and convert it into a JSON playlist.
///
/// Lines starting with `#` are treated as comments and skipped; every other
/// line is probed as a media source and only clips with a positive duration
/// are taken over. When a playlist for the given date already exists, the
/// imported program is appended to it.
///
/// Returns a success message, or an `Error` when the playlist folder is
/// missing or reading/writing fails.
pub fn import_file(
    config: &PlayoutConfig,
    date: &str,
    channel_name: Option<String>,
    path: &Path,
) -> Result<String, Error> {
    let file = File::open(path)?;
    let reader = BufReader::new(file);

    let mut playlist = JsonPlaylist {
        channel: channel_name.unwrap_or_else(|| "Channel 1".to_string()),
        date: date.to_string(),
        path: None,
        start_sec: None,
        length: None,
        modified: None,
        program: vec![],
    };

    let playlist_root = &config.channel.playlists;

    if !playlist_root.is_dir() {
        return Err(Error::new(
            ErrorKind::Other,
            format!(
                "Playlist folder <b><magenta>{:?}</></b> not exists!",
                config.channel.playlists,
            ),
        ));
    }

    // Playlists are stored as <root>/<year>/<month>/<date>.json.
    // NOTE: assumes `date` is a valid YYYY-MM-DD string — indexing panics otherwise.
    let d: Vec<&str> = date.split('-').collect();
    let year = d[0];
    let month = d[1];
    let playlist_path = playlist_root.join(year).join(month);
    let playlist_file = &playlist_path.join(format!("{date}.json"));

    create_dir_all(playlist_path)?;

    for line in reader.lines() {
        let line = line?;

        if !line.starts_with('#') {
            let item = Media::new(0, &line, true);

            // Unreadable/empty sources end up with duration 0 and are dropped.
            if item.duration > 0.0 {
                playlist.program.push(item);
            }
        }
    }

    // Merge into an already existing playlist for that date, keeping the
    // existing program first.
    let file_exists = playlist_file.is_file();

    if file_exists {
        let mut existing_data = json_reader(playlist_file)?;
        existing_data.program.append(&mut playlist.program);

        playlist.program = existing_data.program;
    }

    let msg = if file_exists {
        format!("Update playlist from {date} success!")
    } else {
        format!("Write playlist from {date} success!")
    };

    match json_writer(playlist_file, playlist) {
        Ok(_) => Ok(msg),
        Err(e) => Err(Error::new(ErrorKind::Other, e)),
    }
}

View File

@ -1,201 +0,0 @@
use serde::{Deserialize, Serialize};
use std::{
fs::File,
path::Path,
sync::{atomic::AtomicBool, Arc, Mutex},
thread,
};
use log::*;
use crate::player::utils::{
get_date, is_remote, json_validate::validate_playlist, modified_time, time_from_header, Media,
PlayoutConfig,
};
use crate::utils::{config::DUMMY_LEN, logging::Target};
/// This is our main playlist object, it holds all necessary information for the current day.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct JsonPlaylist {
    // Channel display name; defaults to "Channel 1" when missing in the JSON.
    #[serde(default = "default_channel")]
    pub channel: String,
    pub date: String,

    // Runtime-only fields below are never part of the JSON on disk.
    #[serde(skip_serializing, skip_deserializing)]
    pub start_sec: Option<f64>,

    #[serde(skip_serializing, skip_deserializing)]
    pub length: Option<f64>,

    #[serde(skip_serializing, skip_deserializing)]
    pub path: Option<String>,

    #[serde(skip_serializing, skip_deserializing)]
    pub modified: Option<String>,

    // The actual program: ordered list of media clips for the day.
    pub program: Vec<Media>,
}
impl JsonPlaylist {
    /// Build a fallback playlist for `date` that contains a single dummy
    /// clip, used whenever no real playlist can be read.
    pub fn new(date: String, start: f64) -> Self {
        let mut placeholder = Media::new(0, "", false);
        placeholder.begin = Some(start);
        placeholder.title = None;
        placeholder.duration = DUMMY_LEN;
        placeholder.out = DUMMY_LEN;

        Self {
            channel: "Channel 1".into(),
            date,
            start_sec: Some(start),
            length: Some(86400.0),
            path: None,
            modified: None,
            program: vec![placeholder],
        }
    }
}
// Playlists compare equal when channel, date and program match;
// the transient runtime fields (path, start_sec, modified, …) are ignored.
impl PartialEq for JsonPlaylist {
    fn eq(&self, other: &Self) -> bool {
        self.channel == other.channel && self.date == other.date && self.program == other.program
    }
}

impl Eq for JsonPlaylist {}

// Serde default for the `channel` field.
fn default_channel() -> String {
    "Channel 1".to_string()
}
/// Stamp runtime defaults onto every clip and compute the playlist length.
///
/// Each item gets its absolute begin time, its index and reset ad/process
/// flags; the summed clip durations become the playlist's `length`.
pub fn set_defaults(playlist: &mut JsonPlaylist) {
    let mut begin = playlist.start_sec.unwrap();
    let mut total = 0.0;

    // Add extra values to every media clip
    for (i, item) in playlist.program.iter_mut().enumerate() {
        item.begin = Some(begin);
        item.index = Some(i);
        item.last_ad = false;
        item.next_ad = false;
        item.process = Some(true);
        item.filter = None;

        let clip_duration = item.out - item.seek;
        begin += clip_duration;
        total += clip_duration;
    }

    playlist.length = Some(total)
}
/// Read json playlist file, fills JsonPlaylist struct and set some extra values,
/// which we need to process.
///
/// Resolution order: explicit `path` argument, otherwise the derived
/// `<playlists>/<year>/<month>/<date>.json` location; remote (HTTP) roots are
/// fetched with a blocking request. On any failure a dummy playlist for the
/// date is returned instead.
pub fn read_json(
    config: &mut PlayoutConfig,
    current_list: Arc<Mutex<Vec<Media>>>,
    path: Option<String>,
    is_terminated: Arc<AtomicBool>,
    seek: bool,
    get_next: bool,
) -> JsonPlaylist {
    let id = config.general.channel_id;
    let config_clone = config.clone();
    let mut playlist_path = config.channel.playlists.clone();
    let start_sec = config.playlist.start_sec.unwrap();
    let date = get_date(seek, start_sec, get_next);

    // Folder (or remote) roots use the <root>/<year>/<month>/<date>.json layout.
    if playlist_path.is_dir() || is_remote(&config.channel.playlists.to_string_lossy()) {
        let d: Vec<&str> = date.split('-').collect();
        playlist_path = playlist_path
            .join(d[0])
            .join(d[1])
            .join(date.clone())
            .with_extension("json");
    }

    let mut current_file = playlist_path.as_path().display().to_string();

    // An explicitly passed path overrides the derived location.
    if let Some(p) = path {
        Path::new(&p).clone_into(&mut playlist_path);
        current_file = p
    }

    if is_remote(&current_file) {
        let response = reqwest::blocking::Client::new().get(&current_file).send();

        if let Ok(resp) = response {
            if resp.status().is_success() {
                let headers = resp.headers().clone();

                if let Ok(body) = resp.text() {
                    // Unparsable remote JSON falls back to a dummy playlist.
                    let mut playlist: JsonPlaylist = match serde_json::from_str(&body) {
                        Ok(p) => p,
                        Err(e) => {
                            error!(target: Target::file_mail(), channel = id; "Could't read remote json playlist. {e:?}");
                            JsonPlaylist::new(date.clone(), start_sec)
                        }
                    };
                    playlist.path = Some(current_file);
                    playlist.start_sec = Some(start_sec);

                    // Use the response header timestamp as modification marker.
                    if let Some(time) = time_from_header(&headers) {
                        playlist.modified = Some(time.to_string());
                    }

                    let list_clone = playlist.clone();

                    // Validation runs in a background thread so playback is not delayed.
                    if !config.general.skip_validation {
                        thread::spawn(move || {
                            validate_playlist(config_clone, current_list, list_clone, is_terminated)
                        });
                    }

                    set_defaults(&mut playlist);

                    return playlist;
                }
            }
        }
    } else if playlist_path.is_file() {
        let modified = modified_time(&current_file);

        let f = File::options()
            .read(true)
            .write(false)
            .open(&current_file)
            .expect("Could not open json playlist file.");

        // Unreadable local JSON also falls back to a dummy playlist.
        let mut playlist: JsonPlaylist = match serde_json::from_reader(f) {
            Ok(p) => p,
            Err(e) => {
                error!(target: Target::file_mail(), channel = id; "Playlist file not readable! {e}");
                JsonPlaylist::new(date.clone(), start_sec)
            }
        };

        // catch empty program list
        if playlist.program.is_empty() {
            playlist = JsonPlaylist::new(date, start_sec)
        }

        playlist.path = Some(current_file);
        playlist.start_sec = Some(start_sec);
        playlist.modified = modified;

        let list_clone = playlist.clone();

        if !config.general.skip_validation {
            thread::spawn(move || {
                validate_playlist(config_clone, current_list, list_clone, is_terminated)
            });
        }

        set_defaults(&mut playlist);

        return playlist;
    }

    error!(target: Target::file_mail(), channel = id; "Playlist <b><magenta>{current_file}</></b> not exist!");

    JsonPlaylist::new(date, start_sec)
}

View File

@ -1,262 +0,0 @@
use std::{
io::{BufRead, BufReader},
process::{Command, Stdio},
sync::{
atomic::{AtomicBool, Ordering},
Arc, Mutex,
},
time::Instant,
};
use log::*;
use regex::Regex;
use crate::player::filter::FilterType::Audio;
use crate::player::utils::{
is_close, is_remote, loop_image, sec_to_time, seek_and_length, JsonPlaylist, Media,
};
use crate::utils::{
config::{OutputMode::Null, PlayoutConfig, FFMPEG_IGNORE_ERRORS, IMAGE_FORMAT},
errors::ProcessError,
logging::Target,
};
use crate::vec_strings;
/// Validate a single media file.
///
/// - Check if file exists
/// - Check if ffmpeg can read the file
/// - Check if Metadata exists
/// - Check if the file is not silent
fn check_media(
    mut node: Media,
    pos: usize,
    begin: f64,
    config: &PlayoutConfig,
) -> Result<(), ProcessError> {
    let id = config.general.channel_id;
    let mut dec_cmd = vec_strings!["-hide_banner", "-nostats", "-v", "level+info"];
    let mut error_list = vec![];
    let mut config = config.clone();
    // Output is discarded; we only care about decoder/filter diagnostics.
    config.output.mode = Null;

    // How much of the file gets decoded for the check (seconds).
    let mut process_length = 0.1;

    if let Some(decoder_input_cmd) = &config.advanced.decoder.input_cmd {
        dec_cmd.append(&mut decoder_input_cmd.clone());
    }

    if config.logging.detect_silence {
        // Silence detection needs a longer probe window.
        process_length = 15.0;
        let seek = node.duration / 4.0;

        // Seek in file, to prevent false silence detection on intros without sound.
        dec_cmd.append(&mut vec_strings!["-ss", seek]);
    }

    // Take care, that no seek and length command is added.
    node.seek = 0.0;
    node.out = node.duration;

    // Still images get looped; everything else gets the normal seek/length command.
    if node
        .source
        .rsplit_once('.')
        .map(|(_, e)| e.to_lowercase())
        .filter(|c| IMAGE_FORMAT.contains(&c.as_str()))
        .is_some()
    {
        node.cmd = Some(loop_image(&config, &node));
    } else {
        node.cmd = Some(seek_and_length(&config, &mut node));
    }

    node.add_filter(&config, &None);

    let mut filter = node.filter.unwrap_or_default();

    // Neutralize any volume filter so it cannot mask (or fake) real silence.
    if filter.cmd().len() > 1 {
        let re_clean = Regex::new(r"volume=[0-9.]+")?;

        filter.audio_chain = re_clean
            .replace_all(&filter.audio_chain, "anull")
            .to_string();
    }

    filter.add_filter("silencedetect=n=-30dB", 0, Audio);

    dec_cmd.append(&mut node.cmd.unwrap_or_default());
    dec_cmd.append(&mut filter.cmd());
    dec_cmd.append(&mut filter.map());
    dec_cmd.append(&mut vec_strings!["-t", process_length, "-f", "null", "-"]);

    let mut enc_proc = Command::new("ffmpeg")
        .args(dec_cmd)
        .stderr(Stdio::piped())
        .spawn()?;

    let enc_err = BufReader::new(enc_proc.stderr.take().unwrap());

    let mut silence_start = 0.0;
    let mut silence_end = 0.0;
    // silencedetect prints e.g. "silence_start: 1.234" on stderr.
    let re_start = Regex::new(r"silence_start: ([0-9]+:)?([0-9.]+)")?;
    let re_end = Regex::new(r"silence_end: ([0-9]+:)?([0-9.]+)")?;

    // Collect de-duplicated [error]/[fatal] lines and silence timestamps.
    for line in enc_err.lines() {
        let line = line?;

        if !FFMPEG_IGNORE_ERRORS.iter().any(|i| line.contains(*i))
            && !config.logging.ignore_lines.iter().any(|i| line.contains(i))
            && (line.contains("[error]") || line.contains("[fatal]"))
        {
            let log_line = line.replace("[error] ", "").replace("[fatal] ", "");

            if !error_list.contains(&log_line) {
                error_list.push(log_line);
            }
        }

        if config.logging.detect_silence {
            if let Some(start) = re_start.captures(&line).and_then(|c| c.get(2)) {
                silence_start = start.as_str().parse::<f32>().unwrap_or_default();
            }

            if let Some(end) = re_end.captures(&line).and_then(|c| c.get(2)) {
                // Small tolerance so a near-complete silent probe still counts.
                silence_end = end.as_str().parse::<f32>().unwrap_or_default() + 0.5;
            }
        }
    }

    // A silent span longer than the probed length means the whole probe was silent.
    if silence_end - silence_start > process_length {
        error_list.push("Audio is totally silent!".to_string());
    }

    if !error_list.is_empty() {
        error!(target: Target::file_mail(), channel = id;
            "<bright black>[Validator]</> ffmpeg error on position <yellow>{pos}</> - {}: <b><magenta>{}</></b>: {}",
            sec_to_time(begin),
            node.source,
            error_list.join("\n")
        )
    }

    error_list.clear();

    if let Err(e) = enc_proc.wait() {
        error!(target: Target::file_mail(), channel = id; "Validation process: {e:?}");
    }

    Ok(())
}
/// Validate a given playlist, to check if:
///
/// - the source files are existing
/// - file can be read by ffprobe and metadata exists
/// - total playtime fits target length from config
///
/// This function we run in a thread, to don't block the main function.
pub fn validate_playlist(
    mut config: PlayoutConfig,
    current_list: Arc<Mutex<Vec<Media>>>,
    mut playlist: JsonPlaylist,
    is_terminated: Arc<AtomicBool>,
) {
    let id = config.general.channel_id;
    let date = playlist.date;

    if config.text.add_text && !config.text.text_from_filename {
        // Turn of drawtext filter with zmq, because its port is needed by the decoder instance.
        config.text.add_text = false;
    }

    let mut length = config.playlist.length_sec.unwrap();
    let mut begin = config.playlist.start_sec.unwrap();

    // `length` becomes the absolute end time of the playlist day.
    length += begin;

    debug!(target: Target::file_mail(), channel = id; "Validate playlist from: <yellow>{date}</>");
    let timer = Instant::now();

    for (index, item) in playlist.program.iter_mut().enumerate() {
        // Abort early when the channel is shutting down.
        if is_terminated.load(Ordering::SeqCst) {
            return;
        }

        let pos = index + 1;

        // Remote sources are not probed here; that would block on network I/O.
        if !is_remote(&item.source) {
            if item.audio.is_empty() {
                if let Err(e) = item.add_probe(false) {
                    error!(target: Target::file_mail(), channel = id;
                        "[Validation] Error on position <yellow>{pos:0>3}</> <yellow>{}</>: {e}",
                        sec_to_time(begin)
                    );
                }
            } else if let Err(e) = item.add_probe(true) {
                error!(target: Target::file_mail(), channel = id;
                    "[Validation] Error on position <yellow>{pos:0>3}</> <yellow>{}</>: {e}",
                    sec_to_time(begin)
                );
            }
        }

        if item.probe.is_some() {
            if let Err(e) = check_media(item.clone(), pos, begin, &config) {
                error!(target: Target::file_mail(), channel = id; "{e}");
            } else if config.general.validate {
                debug!(target: Target::file_mail(), channel = id;
                    "[Validation] Source at <yellow>{}</>, seems fine: <b><magenta>{}</></b>",
                    sec_to_time(begin),
                    item.source
                )
            } else if let Ok(mut list) = current_list.try_lock() {
                // Filter out same item in current playlist, then add the probe to it.
                // Check also if duration differs with playlist value, log error if so and adjust that value.
                list.iter_mut().filter(|list_item| list_item.source == item.source).for_each(|o| {
                    o.probe.clone_from(&item.probe);

                    if let Some(dur) =
                        item.probe.as_ref().and_then(|f| f.format.duration.clone())
                    {
                        let probe_duration = dur.parse().unwrap_or_default();

                        if !is_close(o.duration, probe_duration, 1.2) {
                            error!(target: Target::file_mail(), channel = id;
                                "[Validation] File duration (at: <yellow>{}</>) differs from playlist value. File duration: <yellow>{}</>, playlist value: <yellow>{}</>, source <b><magenta>{}</></b>",
                                sec_to_time(o.begin.unwrap_or_default()), sec_to_time(probe_duration), sec_to_time(o.duration), o.source
                            );

                            o.duration = probe_duration;
                        }
                    }

                    if o.audio == item.audio && item.probe_audio.is_some() {
                        o.probe_audio.clone_from(&item.probe_audio);
                        o.duration_audio = item.duration_audio;
                    }
                });
            }
        }

        // Advance the virtual clock by the clip's effective play time.
        begin += item.out - item.seek;
    }

    if !config.playlist.infinit && length > begin + 1.2 {
        error!(target: Target::file_mail(), channel = id;
            "[Validation] Playlist from <yellow>{date}</> not long enough, <yellow>{}</> needed!",
            sec_to_time(length - begin),
        );
    }

    if config.general.validate {
        info!(target: Target::file_mail(), channel = id;
            "[Validation] Playlist length: <yellow>{}</>",
            sec_to_time(begin - config.playlist.start_sec.unwrap())
        );
    }

    debug!(target: Target::file_mail(), channel = id;
        "Validation done, in <yellow>{:.3?}</>, playlist length: <yellow>{}</> ...",
        timer.elapsed(),
        sec_to_time(begin - config.playlist.start_sec.unwrap())
    );
}

File diff suppressed because it is too large Load Diff

View File

@ -1,155 +0,0 @@
use std::{
sync::{atomic::Ordering, Arc},
time::Duration,
};
use actix_web::{rt::time::interval, web};
use actix_web_lab::{
sse::{self, Sse},
util::InfallibleStream,
};
use parking_lot::Mutex;
use tokio::sync::mpsc;
use tokio_stream::wrappers::ReceiverStream;
use crate::player::{controller::ChannelManager, utils::get_data_map};
use crate::utils::system;
/// One connected SSE client: the channel manager it subscribed to, the
/// endpoint it listens on ("playout" or "system") and its event sender.
#[derive(Debug, Clone)]
struct Client {
    manager: ChannelManager,
    endpoint: String,
    sender: mpsc::Sender<sse::Event>,
}

impl Client {
    fn new(manager: ChannelManager, endpoint: String, sender: mpsc::Sender<sse::Event>) -> Self {
        Self {
            manager,
            endpoint,
            sender,
        }
    }
}
/// Fans server-sent events out to all registered clients.
pub struct Broadcaster {
    // Client registry behind a parking_lot mutex (sync, non-async locking).
    inner: Mutex<BroadcasterInner>,
}

#[derive(Debug, Clone, Default)]
struct BroadcasterInner {
    clients: Vec<Client>,
}
impl Broadcaster {
    /// Constructs new broadcaster and spawns ping loop.
    pub fn create() -> Arc<Self> {
        let this = Arc::new(Broadcaster {
            inner: Mutex::new(BroadcasterInner::default()),
        });
        Broadcaster::spawn_ping(Arc::clone(&this));

        this
    }

    /// Pings clients every 10 seconds to see if they are alive and remove them from the broadcast
    /// list if not.
    fn spawn_ping(this: Arc<Self>) {
        actix_web::rt::spawn(async move {
            // One tick per second; playout/system data is pushed on every tick.
            let mut interval = interval(Duration::from_secs(1));
            let mut counter = 0;

            loop {
                interval.tick().await;

                // Stale-client check on every 10th tick.
                if counter % 10 == 0 {
                    this.remove_stale_clients().await;
                }

                this.broadcast_playout().await;
                this.broadcast_system().await;

                // NOTE(review): wrapping at 61 makes tick 60 and the following
                // tick 0 both multiples of 10 (two consecutive stale checks) —
                // confirm `% 61` is intended rather than `% 60`.
                counter = (counter + 1) % 61;
            }
        });
    }

    /// Removes all non-responsive clients from broadcast list.
    async fn remove_stale_clients(&self) {
        let clients = self.inner.lock().clients.clone();

        let mut ok_clients = Vec::new();

        // Keep only clients whose channel still accepts a ping comment.
        for client in clients {
            if client
                .sender
                .send(sse::Event::Comment("ping".into()))
                .await
                .is_ok()
            {
                ok_clients.push(client.clone());
            }
        }

        self.inner.lock().clients = ok_clients;
    }

    /// Registers client with broadcaster, returning an SSE response body.
    pub async fn new_client(
        &self,
        manager: ChannelManager,
        endpoint: String,
    ) -> Sse<InfallibleStream<ReceiverStream<sse::Event>>> {
        let (tx, rx) = mpsc::channel(10);

        tx.send(sse::Data::new("connected").into()).await.unwrap();

        self.inner
            .lock()
            .clients
            .push(Client::new(manager, endpoint, tx));

        Sse::from_infallible_receiver(rx)
    }

    /// Broadcasts playout status to clients.
    pub async fn broadcast_playout(&self) {
        let clients = self.inner.lock().clients.clone();

        for client in clients.iter().filter(|client| client.endpoint == "playout") {
            let media_map = get_data_map(&client.manager);

            if client.manager.is_alive.load(Ordering::SeqCst) {
                let _ = client
                    .sender
                    .send(
                        sse::Data::new(serde_json::to_string(&media_map).unwrap_or_default())
                            .into(),
                    )
                    .await;
            } else {
                let _ = client
                    .sender
                    .send(sse::Data::new("not running").into())
                    .await;
            }
        }
    }

    /// Broadcasts system status to clients.
    pub async fn broadcast_system(&self) {
        let clients = self.inner.lock().clients.clone();

        for client in clients {
            if &client.endpoint == "system" {
                let config = client.manager.config.lock().unwrap().clone();

                // Stat gathering is blocking work; run it off the async executor.
                if let Ok(stat) = web::block(move || system::stat(config.clone())).await {
                    let stat_string = stat.to_string();

                    let _ = client.sender.send(sse::Data::new(stat_string).into()).await;
                };
            }
        }
    }
}

View File

@ -1,55 +0,0 @@
use std::{
collections::HashSet,
time::{Duration, SystemTime},
};
use tokio::sync::Mutex;
use uuid::Uuid;
use crate::utils::errors::ServiceError;
pub mod broadcast;
pub mod routes;
/// A session UUID together with its absolute expiration time.
#[derive(Debug, Eq, Hash, PartialEq, Clone, Copy)]
pub struct UuidData {
    pub uuid: Uuid,
    pub expiration: SystemTime,
}

impl UuidData {
    /// Create a fresh random UUID that is valid for two hours.
    pub fn new() -> Self {
        Self {
            uuid: Uuid::new_v4(),
            expiration: SystemTime::now() + Duration::from_secs(2 * 3600), // 2 hours
        }
    }
}

impl Default for UuidData {
    fn default() -> Self {
        Self::new()
    }
}
/// Shared state holding all currently valid SSE session UUIDs.
pub struct SseAuthState {
    pub uuids: Mutex<HashSet<UuidData>>,
}
/// Remove all UUIDs from the HashSet which are older than the expiration time.
pub fn prune_uuids(uuids: &mut HashSet<UuidData>) {
    // Read the clock once instead of per element inside the retain closure.
    let now = SystemTime::now();

    uuids.retain(|entry| entry.expiration > now);
}
/// Validate a client-supplied UUID string against the known session UUIDs.
///
/// Expired entries are pruned first, so a stale UUID is rejected.
pub fn check_uuid(uuids: &mut HashSet<UuidData>, uuid: &str) -> Result<&'static str, ServiceError> {
    let client_uuid = Uuid::parse_str(uuid)?;

    prune_uuids(uuids);

    if uuids.iter().any(|entry| entry.uuid == client_uuid) {
        Ok("UUID is valid")
    } else {
        Err(ServiceError::Unauthorized(
            "Invalid or expired UUID".to_string(),
        ))
    }
}

View File

@ -1,88 +0,0 @@
use std::sync::Mutex;
use actix_web::{get, post, web, Responder};
use actix_web_grants::proc_macro::protect;
use serde::{Deserialize, Serialize};
use super::{check_uuid, prune_uuids, SseAuthState, UuidData};
use crate::db::models::Role;
use crate::player::controller::ChannelController;
use crate::sse::broadcast::Broadcaster;
use crate::utils::errors::ServiceError;
/// Query payload for the SSE routes: endpoint name and session UUID.
#[derive(Deserialize, Serialize)]
struct User {
    // Never serialized back to the client; defaults to empty when absent.
    #[serde(default, skip_serializing)]
    endpoint: String,
    uuid: String,
}

impl User {
    fn new(endpoint: String, uuid: String) -> Self {
        Self { endpoint, uuid }
    }
}
/// **Get generated UUID**
///
/// ```BASH
/// curl -X POST 'http://127.0.0.1:8787/api/generate-uuid' -H 'Authorization: Bearer <TOKEN>'
/// ```
#[post("/generate-uuid")]
#[protect(
    any("Role::GlobalAdmin", "Role::ChannelAdmin", "Role::User"),
    ty = "Role"
)]
async fn generate_uuid(data: web::Data<SseAuthState>) -> Result<impl Responder, ServiceError> {
    let mut uuids = data.uuids.lock().await;
    let new_uuid = UuidData::new();
    // Only the UUID string goes back to the client; `endpoint` stays empty
    // and is skipped on serialization.
    let user_auth = User::new(String::new(), new_uuid.uuid.to_string());

    // Drop expired sessions before storing the fresh one.
    prune_uuids(&mut uuids);
    uuids.insert(new_uuid);

    Ok(web::Json(user_auth))
}
/// **Validate UUID**
///
/// Returns a JSON success message for a known, unexpired UUID, otherwise
/// the `ServiceError` from the check is propagated unchanged.
///
/// ```BASH
/// curl -X GET 'http://127.0.0.1:8787/data/validate?uuid=f2f8c29b-712a-48c5-8919-b535d3a05a3a'
/// ```
#[get("/validate")]
async fn validate_uuid(
    data: web::Data<SseAuthState>,
    user: web::Query<User>,
) -> Result<impl Responder, ServiceError> {
    let mut uuids = data.uuids.lock().await;

    // `map` wraps the success message as JSON; errors pass straight through,
    // avoiding the redundant Ok/Err re-wrapping match.
    check_uuid(&mut uuids, user.uuid.as_str()).map(web::Json)
}
/// **Connect to event handler**
///
/// ```BASH
/// curl -X GET 'http://127.0.0.1:8787/data/event/1?endpoint=system&uuid=f2f8c29b-712a-48c5-8919-b535d3a05a3a'
/// ```
#[get("/event/{id}")]
async fn event_stream(
    broadcaster: web::Data<Broadcaster>,
    data: web::Data<SseAuthState>,
    id: web::Path<i32>,
    user: web::Query<User>,
    controllers: web::Data<Mutex<ChannelController>>,
) -> Result<impl Responder, ServiceError> {
    let mut uuids = data.uuids.lock().await;

    // Reject the request before touching the controllers.
    check_uuid(&mut uuids, user.uuid.as_str())?;

    // NOTE(review): the second `.unwrap()` panics when `id` is not a known
    // channel — consider mapping a missing channel to a ServiceError instead.
    let manager = controllers.lock().unwrap().get(*id).unwrap();

    Ok(broadcaster
        .new_client(manager.clone(), user.endpoint.clone())
        .await)
}

View File

@ -1,306 +0,0 @@
use std::path::Path;
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, NoneAsEmptyString};
use shlex::split;
use sqlx::{Pool, Sqlite};
use tokio::io::AsyncReadExt;
use ts_rs::TS;
use crate::db::{handles, models::AdvancedConfiguration};
use crate::utils::ServiceError;
/// Advanced, per-channel overrides for the ffmpeg decoder/encoder/filter
/// and ingest command lines. Changing these is for advanced users only.
/// Exported as a TypeScript type for the frontend.
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct AdvancedConfig {
    pub decoder: DecoderConfig,
    pub encoder: EncoderConfig,
    pub filter: FilterConfig,
    pub ingest: IngestConfig,
}
/// Custom decoder parameters.
///
/// The `*_param` fields hold the raw strings as stored in the database /
/// TOML (an empty string maps to `None` via `NoneAsEmptyString`); the
/// `*_cmd` fields are the shlex-split argument vectors used at runtime
/// and are never (de)serialized.
#[serde_as]
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct DecoderConfig {
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub input_param: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub output_param: Option<String>,
    // Split form of `input_param`, built in `AdvancedConfig::new`.
    #[ts(skip)]
    #[serde(skip_serializing, skip_deserializing)]
    pub input_cmd: Option<Vec<String>>,
    // Split form of `output_param`, built in `AdvancedConfig::new`.
    #[ts(skip)]
    #[serde(skip_serializing, skip_deserializing)]
    pub output_cmd: Option<Vec<String>>,
}
/// Custom encoder input parameters; same raw-string / split-vector
/// pairing as `DecoderConfig`.
#[serde_as]
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct EncoderConfig {
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub input_param: Option<String>,
    // Split form of `input_param`, built in `AdvancedConfig::new`.
    #[ts(skip)]
    #[serde(skip_serializing, skip_deserializing)]
    pub input_cmd: Option<Vec<String>>,
}
/// Custom ingest-instance input parameters; same raw-string / split-vector
/// pairing as `DecoderConfig`.
#[serde_as]
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct IngestConfig {
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub input_param: Option<String>,
    // Split form of `input_param`, built in `AdvancedConfig::new`.
    #[ts(skip)]
    #[serde(skip_serializing, skip_deserializing)]
    pub input_cmd: Option<Vec<String>>,
}
/// Per-filter override strings for the ffmpeg filter chain.
///
/// An empty value in TOML/JSON maps to `None` (`NoneAsEmptyString`).
/// Example values for every field are written as trailing comments in the
/// dumped TOML — see `AdvancedConfig::dump`.
#[serde_as]
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct FilterConfig {
    // --- video filters ---
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub deinterlace: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub pad_scale_w: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub pad_scale_h: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub pad_video: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub fps: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub scale: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub set_dar: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub fade_in: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub fade_out: Option<String>,
    // --- logo overlay ---
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub overlay_logo_scale: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub overlay_logo_fade_in: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub overlay_logo_fade_out: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub overlay_logo: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub tpad: Option<String>,
    // --- text overlay ---
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub drawtext_from_file: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub drawtext_from_zmq: Option<String>,
    // --- audio filters ---
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub aevalsrc: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub afade_in: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub afade_out: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub apad: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub volume: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub split: Option<String>,
}
impl AdvancedConfig {
pub fn new(config: AdvancedConfiguration) -> Self {
Self {
decoder: DecoderConfig {
input_param: config.decoder_input_param.clone(),
output_param: config.decoder_output_param.clone(),
input_cmd: match config.decoder_input_param {
Some(input_param) => split(&input_param),
None => None,
},
output_cmd: match config.decoder_output_param {
Some(output_param) => split(&output_param),
None => None,
},
},
encoder: EncoderConfig {
input_param: config.encoder_input_param.clone(),
input_cmd: match config.encoder_input_param {
Some(input_param) => split(&input_param),
None => None,
},
},
filter: FilterConfig {
deinterlace: config.filter_deinterlace,
pad_scale_w: config.filter_pad_scale_w,
pad_scale_h: config.filter_pad_scale_h,
pad_video: config.filter_pad_video,
fps: config.filter_fps,
scale: config.filter_scale,
set_dar: config.filter_set_dar,
fade_in: config.filter_fade_in,
fade_out: config.filter_fade_out,
overlay_logo_scale: config.filter_overlay_logo_scale,
overlay_logo_fade_in: config.filter_overlay_logo_fade_in,
overlay_logo_fade_out: config.filter_overlay_logo_fade_out,
overlay_logo: config.filter_overlay_logo,
tpad: config.filter_tpad,
drawtext_from_file: config.filter_drawtext_from_file,
drawtext_from_zmq: config.filter_drawtext_from_zmq,
aevalsrc: config.filter_aevalsrc,
afade_in: config.filter_afade_in,
afade_out: config.filter_afade_out,
apad: config.filter_apad,
volume: config.filter_volume,
split: config.filter_split,
},
ingest: IngestConfig {
input_param: config.ingest_input_param.clone(),
input_cmd: match config.ingest_input_param {
Some(input_param) => split(&input_param),
None => None,
},
},
}
}
pub async fn dump(pool: &Pool<Sqlite>, id: i32) -> Result<(), ServiceError> {
let config = Self::new(handles::select_advanced_configuration(pool, id).await?);
let f_keys = [
"deinterlace",
"pad_scale_w",
"pad_scale_h",
"pad_video",
"fps",
"scale",
"set_dar",
"fade_in",
"fade_out",
"overlay_logo_scale",
"overlay_logo_fade_in",
"overlay_logo_fade_out",
"overlay_logo",
"tpad",
"drawtext_from_file",
"drawtext_from_zmq",
"aevalsrc",
"afade_in",
"afade_out",
"apad",
"volume",
"split",
];
let toml_string = toml_edit::ser::to_string_pretty(&config)?;
let mut doc = toml_string.parse::<toml_edit::DocumentMut>()?;
if let Some(decoder) = doc.get_mut("decoder").and_then(|o| o.as_table_mut()) {
decoder
.decor_mut()
.set_prefix("# Changing these settings is for advanced users only!\n# There will be no support or guarantee that it will be stable after changing them.\n\n");
}
if let Some(output_param) = doc
.get_mut("decoder")
.and_then(|d| d.get_mut("output_param"))
.and_then(|o| o.as_value_mut())
{
output_param
.decor_mut()
.set_suffix(" # get also applied to ingest instance.");
}
if let Some(filter) = doc.get_mut("filter") {
for key in &f_keys {
if let Some(item) = filter.get_mut(*key).and_then(|o| o.as_value_mut()) {
match *key {
"deinterlace" => item.decor_mut().set_suffix(" # yadif=0:-1:0"),
"pad_scale_w" => item.decor_mut().set_suffix(" # scale={}:-1"),
"pad_scale_h" => item.decor_mut().set_suffix(" # scale=-1:{}"),
"pad_video" => item.decor_mut().set_suffix(
" # pad=max(iw\\,ih*({0}/{1})):ow/({0}/{1}):(ow-iw)/2:(oh-ih)/2",
),
"fps" => item.decor_mut().set_suffix(" # fps={}"),
"scale" => item.decor_mut().set_suffix(" # scale={}:{}"),
"set_dar" => item.decor_mut().set_suffix(" # setdar=dar={}"),
"fade_in" => item.decor_mut().set_suffix(" # fade=in:st=0:d=0.5"),
"fade_out" => item.decor_mut().set_suffix(" # fade=out:st={}:d=1.0"),
"overlay_logo_scale" => item.decor_mut().set_suffix(" # scale={}"),
"overlay_logo_fade_in" => {
item.decor_mut().set_suffix(" # fade=in:st=0:d=1.0:alpha=1")
}
"overlay_logo_fade_out" => item
.decor_mut()
.set_suffix(" # fade=out:st={}:d=1.0:alpha=1"),
"overlay_logo" => item
.decor_mut()
.set_suffix(" # null[l];[v][l]overlay={}:shortest=1"),
"tpad" => item
.decor_mut()
.set_suffix(" # tpad=stop_mode=add:stop_duration={}"),
"drawtext_from_file" => {
item.decor_mut().set_suffix(" # drawtext=text='{}':{}{}")
}
"drawtext_from_zmq" => item
.decor_mut()
.set_suffix(" # zmq=b=tcp\\\\://'{}',drawtext@dyntext={}"),
"aevalsrc" => item.decor_mut().set_suffix(
" # aevalsrc=0:channel_layout=stereo:duration={}:sample_rate=48000",
),
"afade_in" => item.decor_mut().set_suffix(" # afade=in:st=0:d=0.5"),
"afade_out" => item.decor_mut().set_suffix(" # afade=out:st={}:d=1.0"),
"apad" => item.decor_mut().set_suffix(" # apad=whole_dur={}"),
"volume" => item.decor_mut().set_suffix(" # volume={}"),
"split" => item.decor_mut().set_suffix(" # split={}{}"),
_ => (),
}
}
}
};
tokio::fs::write(&format!("advanced_{id}.toml"), doc.to_string()).await?;
Ok(())
}
pub async fn import(pool: &Pool<Sqlite>, id: i32, path: &Path) -> Result<(), ServiceError> {
if path.is_file() {
let mut file = tokio::fs::File::open(path).await?;
let mut contents = String::new();
file.read_to_string(&mut contents).await?;
let config: Self = toml_edit::de::from_str(&contents).unwrap();
handles::update_advanced_configuration(pool, id, config).await?;
} else {
return Err(ServiceError::BadRequest("Path not exists!".to_string()));
}
Ok(())
}
}

Some files were not shown because too many files have changed in this diff Show More