Compare commits

..

7 Commits

Author       SHA1        Message                                                                        Date
jb-alvarado  6fbde92f6b  Merge pull request #403 from jb-alvarado/v0.19 (remove openssl dependencies)  2023-10-08 20:12:17 +00:00
jb-alvarado  813e48fd54  remove openssl dependencies                                                    2023-10-08 22:06:16 +02:00
jb-alvarado  943cf90e15  backport: fix preview in player #397                                           2023-10-08 20:48:59 +02:00
jb-alvarado  53aebc779b  backport: fix clippy warning duplicate naming                                  2023-10-08 20:47:54 +02:00
jb-alvarado  b96e765f0b  backport: rename filler_clip -> filler                                         2023-10-08 20:47:24 +02:00
jb-alvarado  05ab9aac71  backport: fix clippy warnings                                                  2023-10-08 20:46:27 +02:00
jb-alvarado  0808fb29ab  update packages                                                                2023-10-08 20:45:59 +02:00
256 changed files with 20859 additions and 98213 deletions


@@ -1,2 +0,0 @@
[env]
TS_RS_EXPORT_DIR = { value = "frontend/types", relative = true }

.github/FUNDING.yml vendored (2 lines changed)

@@ -1,5 +1,3 @@
# These are supported funding model platforms
github: [jb-alvarado]
custom: PayPal.Me/jonaBaec
open_collective: ffplayout


@@ -7,16 +7,11 @@ assignees: ''
---
<!--
Note: use this template only when you have a bug to report!
-->
**Describe the bug**
### Describe the bug
<!--
A clear and concise description of what the bug is.
-->
### To Reproduce
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
@@ -24,23 +19,22 @@ Steps to reproduce the behavior:
3. Scroll down to '....'
4. See error
### Expected behavior
<!--
**Expected behavior**
A clear and concise description of what you expected to happen.
-->
### Desktop/Server/Software (please complete the following information):
**Desktop/Server/Software (please complete the following information):**
- OS: [e.g. debian 12]
- OS: [e.g. debian 11]
- ffplayout version
- ffmpeg version
- are you using the current master of ffplayout?
### Config Settings:
**Config Settings:**
- command line arguments
- config file
### Logging:
**Logging:**
- content of: ffplayout.log


@@ -1,26 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: '[Enhancement] <!--FEATURE NAME-->'
labels: enhancement
---
<!--
Note: use this template only when you have a feature request!
-->
### Feature description
<!--
A clear and concise description of what the feature should do.
-->
### The problem in the current version
<!--
What exactly is currently missing?
-->
### Alternative ways
<!--
What have you already tried to solve this problem?
-->


@@ -1,12 +0,0 @@
name: Autocloser
on: [issues]
jobs:
autoclose:
runs-on: ubuntu-latest
steps:
- name: Autoclose issues that did not follow issue template
uses: roots/issue-closer@v1.2
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
issue-close-message: "@${issue.user.login} this issue was automatically closed because it did not follow the issue template. Please read [CONTRIBUTING.md](https://github.com/ffplayout/ffplayout/blob/master/CONTRIBUTING.md) for more information."
issue-pattern: ".*### Describe the bug([\\s\\S]*?)### To Reproduce.*|### Feature description.*"


@@ -8,19 +8,12 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
- name: Set Build Tools and update Rust
- name: On all Systems
run: |
rustup update stable
rustup component add rustfmt
rustup component add clippy
- name: Init Submodules
run: |
git submodule update --init --recursive
- name: Use ffmpeg on Linux
if: ${{ matrix.os == 'ubuntu-latest' }}
uses: FedericoCarboni/setup-ffmpeg@v2

.gitignore vendored (35 lines changed)

@@ -20,40 +20,9 @@
*.deb
*.rpm
ffplayout.1.gz
ffpapi.1.gz
/assets/*.db*
/dist/
data/
/public/
tmp/
assets/playlist_template.json
advanced*.toml
ffplayout*.toml
template.json
# frontend stuff
node_modules
.nuxt
.nitro
.cache
.output
.env
dist
.eslintcache
*.tgz
.yarn-integrity
sw.*
.DS_Store
*.swp
master.m3u8
tv-media
tv-media/
Videos
Videos/
*.tar*
home
home/
live1
live1/
Musik
Musik/
test.vue
.vscode/

.gitmodules vendored (new file, 3 lines)

@@ -0,0 +1,3 @@
[submodule "ffplayout-frontend"]
path = ffplayout-frontend
url = git@github.com:ffplayout/ffplayout-frontend.git
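With the frontend added as a submodule, a fresh clone needs it initialized before building. A minimal sketch, using the same command the CI workflow above runs:

```Bash
# clone ffplayout and pull in the ffplayout-frontend submodule
git clone https://github.com/ffplayout/ffplayout.git
cd ffplayout
git submodule update --init --recursive
```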


@@ -1,8 +0,0 @@
assets/
debian/
docker/
docs/
frontend/
migrations/
scripts/
tests/


@@ -1,13 +0,0 @@
{
"recommendations": [
"bradlc.vscode-tailwindcss",
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"hollowtree.vue-snippets",
"rust-lang.rust-analyzer",
"statiolake.vscode-rustfmt",
"tamasfe.even-better-toml",
"vue.volar",
"wscats.vue",
]
}

.vscode/settings.json vendored (96 lines changed)

@@ -1,96 +0,0 @@
{
"eslint.useFlatConfig": true,
"prettier.tabWidth": 4,
"prettier.printWidth": 120,
"vue3snippets.semi": false,
"vue3snippets.singleQuote": true,
"vue3snippets.jsxSingleQuote": true,
"vue3snippets.printWidth": 120,
"vue3snippets.tabWidth": 4,
"prettier.jsxSingleQuote": true,
"prettier.semi": false,
"prettier.singleQuote": true,
"rust-analyzer.cargo.target": null,
"rust-analyzer.checkOnSave": true,
"rust-analyzer.cargo.buildScripts.overrideCommand": null,
"rust-analyzer.rustfmt.overrideCommand": null,
"rust-analyzer.inlayHints.chainingHints.enable": false,
"rust-analyzer.inlayHints.parameterHints.enable": false,
"rust-analyzer.inlayHints.typeHints.enable": false,
"rust-analyzer.diagnostics.disabled": ["unresolved-proc-macro"],
"[dockercompose]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[css]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[html]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[scss]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[vue]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[rust]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "statiolake.vscode-rustfmt"
},
"[yaml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"cSpell.words": [
"actix",
"aevalsrc",
"afade",
"apad",
"boxborderw",
"boxcolor",
"canonicalize",
"cgop",
"coeffs",
"ffpengine",
"flexi",
"fontcolor",
"fontfile",
"fontsize",
"httpauth",
"ifnot",
"keyint",
"lettre",
"libc",
"libx",
"libzmq",
"maxrate",
"minrate",
"muxdelay",
"muxer",
"muxpreload",
"n'vtt",
"neli",
"nuxt",
"paris",
"Referer",
"reqwest",
"rsplit",
"RTSP",
"rustls",
"scenecut",
"sqlite",
"sqlx",
"starttls",
"tokio",
"tpad",
"unistd",
"uuids",
"webm",
"zerolatency"
]
}


@@ -1,96 +1,5 @@
# Changelog
## [0.20.3](https://github.com/ffplayout/ffplayout/releases/tag/v0.20.3) (2024-01-03)
### ffplayout
- improve live sources [9912405](https://github.com/ffplayout/ffplayout/commit/9912405e4e976b99be9d174fa9cc54700984d5a9)
- update sysinfo to support stats on network storage [8737769](https://github.com/ffplayout/ffplayout/commit/873776908e10b2eb9d92fb743a578a848e95c49c)
### Documentation
- fix API examples [c8ca4588d](https://github.com/ffplayout/ffplayout/commit/c8ca4588d178b1f94f5c7dce40fd4a07a10a695b)
## [0.20.2](https://github.com/ffplayout/ffplayout/releases/tag/v0.20.2) (2023-12-16)
### ffplayout
- better error message [5c14b89](https://github.com/ffplayout/ffplayout/commit/5c14b895f2c8e34990097354fea860a5030a5732)
- warn and adjust duration on validation [a30f21b](https://github.com/ffplayout/ffplayout/commit/a30f21b86688fbf4de477279217ca3a739409719)
### ffpapi
- thread block on hashing [4c4199cb](https://github.com/ffplayout/ffplayout/commit/4c4199cbdb0836d69d67fd6dee1869fb08eeffbf)
- remove salt from table [15f41148](https://github.com/ffplayout/ffplayout/commit/15f41148dfb26ccaea159f5c5305a966cf81b1c4)
### frontend
- possibility to preview live/html sources [5881527](https://github.com/ffplayout/ffplayout/pull/472/commits/5881527fc571feccaee7f7f1877750ccc44516f5)
## [0.20.1](https://github.com/ffplayout/ffplayout/releases/tag/v0.20.1) (2023-12-03)
### ffplayout
- add silence detection for validation [ea83160](https://github.com/ffplayout/ffplayout/commit/ea83160ba63bb8723de1f004f6449b37a1ea2593)
- loop separate audio when it is too short [94e02ac](https://github.com/ffplayout/ffplayout/commit/94e02ac3678c0f8cdec97002f30e08beb45e748b)
- add probe object in validation thread, to hopefully reduce latency and reduce unneeded file access [0330ad6](https://github.com/ffplayout/ffplayout/commit/0330ad61681a4cb576d4a46365c8cdffdfc96379)
### ffpapi
- update actix-web-grants to v4 [f1e87975](https://github.com/ffplayout/ffplayout/commit/f1e8797528e649aac6de85d897b7c03b8007a2b3)
### frontend
- call system status only when app is not hidden [3f22297](https://github.com/ffplayout/ffplayout/commit/3f222975c16580deeeedaa2e0721e4a312e7c8fb)
- select, edit and delete user [f86a6c3](https://github.com/ffplayout/ffplayout/commit/f86a6c3f1dfb8ec5f3c8e74714b8eecda2b443c3)
- global middleware [c60d60d](https://github.com/ffplayout/ffplayout/commit/c60d60d9b3f74095034760f22876aed877e0464f)
## [0.20.0](https://github.com/ffplayout/ffplayout/releases/tag/v0.20.0) (2023-11-16)
### ffplayout
- run task on clip change, #276 [5bd1b2](https://github.com/ffplayout/ffplayout/commit/5bd1b23513d3cb0a9f6574626032acdd6627e790)
- support filler folder [98d1d5](https://github.com/ffplayout/ffplayout/commit/98d1d5d606b3f90ebeb1f0cd54156ee820272dd2) [04353a](https://github.com/ffplayout/ffplayout/commit/04353a984d43e1059ee9808ee08700e8c5e1cb8b)
- support log level as cmd argument [334f84](https://github.com/ffplayout/ffplayout/commit/334f842d1923e7150f0ed504fa85f4936c0213d7)
- add stream copy mode, fix #324 [b44efd](https://github.com/ffplayout/ffplayout/commit/b44efde8f1a771122c10f79e1a5da8ba724acd56)
- replace realtime filter with readrate parameter for hls mode [4b18d41](https://github.com/ffplayout/ffplayout/commit/4b18d414b7437f48a3663e9e9b547e83ab605cda) (**!WARNING:** older ffmpeg versions will not work anymore! Now 5.0+ is needed.)
- choice audio track index, fix #348 [1bfff2](https://github.com/ffplayout/ffplayout/commit/1bfff27b4b46405b52a428b38bd00fe4e9c3f78d)
- fix boxborderw value [fef7d0](https://github.com/ffplayout/ffplayout/commit/fef7d04e65b6275b6bb6c5b813c83b8641051882)
- stop decoder with SIGTERM signal, instead of kill on non windows systems [d2c72d](https://github.com/ffplayout/ffplayout/commit/d2c72d56fe0cc1cced14f8d1d1746f5224011499)
- generate playlists based on template [0c51f8](https://github.com/ffplayout/ffplayout/commit/0c51f8303cd3eacdec8a0ac3abe9edd69e2271c2)
- update chrono and fix audit warning [83cff6](https://github.com/ffplayout/ffplayout/commit/83cff609b3709f4621af506de2f8546099b8848c)
- jump out from source loop when playout is terminated [cf6e56](https://github.com/ffplayout/ffplayout/commit/cf6e5663e98eb52bc84c0e9e5856943ddefc24d9)
- fix program hang when mail sending not work [38e73a](https://github.com/ffplayout/ffplayout/commit/38e73a0138430fc600ae809356127941e1f08eb2)
### ffpapi
- embed static files from frontend in ffpapi, add db path argument [b4cde6e](https://github.com/ffplayout/ffplayout/commit/b4cde6e12ce70af20f52f308d7cb4288f97d31fe)
- Use enum for Role everywhere [7d31735](https://github.com/ffplayout/ffplayout/commit/7d3173533fd8b2a9d6e718ada0c81f017aedc777)
- get config also as normal user [7d31735](https://github.com/ffplayout/ffplayout/commit/7d3173533fd8b2a9d6e718ada0c81f017aedc777)
- fix time shift [7d31735](https://github.com/ffplayout/ffplayout/commit/7d3173533fd8b2a9d6e718ada0c81f017aedc777)
- add option for public path [c304386](https://github.com/ffplayout/ffplayout/commit/c30438697d33fe360e92146c03ad8ce212e138a6)
- add system stat route [c304386](https://github.com/ffplayout/ffplayout/commit/c30438697d33fe360e92146c03ad8ce212e138a6)
### frontend
- option to add user [debb75](https://github.com/ffplayout/ffplayout/commit/debb751428239f2d0ac446a0b9a805cd1ec4a965)
- fix audit alert, get status from playout stat [50bee9](https://github.com/ffplayout/ffplayout-frontend/commit/50bee93c8555b14181864a654239f7e68c50cafb)
- restart modal for config save [2f3234](https://github.com/ffplayout/ffplayout-frontend/commit/2f3234221a0aef8e70d9e2b5e9bbfb1fe51921fc)
- add advanced playlist generator, update packages [806d53](https://github.com/ffplayout/ffplayout-frontend/commit/806d533bc2a84fc994897371071c4399172fa639)
- add dashboard [ba0c0fa](https://github.com/ffplayout/ffplayout/pull/446/commits/ba0c0faaac9c44fbf4f87752c89aaa8859be9bf1)
## [0.19.1](https://github.com/ffplayout/ffplayout/releases/tag/v0.19.1) (2023-10-08)
### ffplayout
- remove openssl dependencies [813e48f](https://github.com/ffplayout/ffplayout/commit/813e48fd54a6482eb09ec418e507733d689663d9)
- update packages [0808fb](https://github.com/ffplayout/ffplayout/commit/0808fb29ab8db17cf1d251336cc90c1db7aa92e0)
### frontend
- fix preview in player #397 [943cf9](https://github.com/ffplayout/ffplayout/commit/943cf90e15edc0efdb9abf0703cc6addbd3dfecc)
## [0.19.0](https://github.com/ffplayout/ffplayout/releases/tag/v0.19.0) (2023-07-19)
### ffplayout


@@ -1,6 +1,6 @@
## Contribute to ffplayout
### Report a bug
#### **Report a bug**
- Check issues if the bug was already reported.
- When this bug was not reported, please use the **bug report** template.
@@ -8,7 +8,7 @@
* use code blocks for config, log and command line parameters
* text from config and logging is preferred over screenshots
### Ask for help
#### **Ask for help**
When something is not working, feel free to ask your question under [discussions](https://github.com/ffplayout/ffplayout/discussions/categories/q-a). But please make some effort, so it is easier to help. Please don't open a discussion in "WhatsApp style", with only one line of text. As a general rule of thumb, answer these points:
@@ -19,26 +19,11 @@ When something is not working, you can feel free to ask your question under [dis
- relevant logging output
- current configuration (ffplayout.yml)
#### Sharing Screenshots
Please share all logging and terminal output in a code block that is surrounded by **```**.
When something is wrong in the frontend, you can also share a screenshot/screen recording, but please do so with the English language selected.
#### Sample files
If playout works normally on your system with the [provided test clips](https://github.com/ffplayout/ffplayout/tree/master/tests/assets/media_sorted), but your files produce errors and you are sure that the problem is related to ffplayout, you can provide a test file under these conditions:
- ffmpeg can process the file normally.
- The file is not too large, a few seconds should be enough.
- The video doesn't contain any illegal content.
- You have legal permission to distribute the file.
- The content is not age restricted (no violent or sexual content).
### Feature request
#### **Feature request**
You can ask for features, but it cannot be guaranteed that they will find their way into the code base. Consider whether your idea is useful for others too, and describe it in an understandable way. If your idea is accepted, it can take time until it is applied. In general, stability takes precedence over features, and when a new version has just arrived, it needs time to prove itself in production.
### Create a pull request
#### **Create a pull request**
In general, pull requests are very welcome! But please don't create features which are too specific and help only your use case and no one else. If you are not sure, better to ask before you start.

Cargo.lock generated (3208 lines changed; diff suppressed because it is too large)


@@ -1,11 +1,10 @@
[workspace]
members = ["engine", "tests"]
members = ["ffplayout-api", "ffplayout-engine", "lib", "tests"]
default-members = ["ffplayout-api", "ffplayout-engine", "tests"]
resolver = "2"
[workspace.package]
description = "24/7 playout based on rust and ffmpeg"
readme = "README.md"
version = "0.24.0"
version = "0.19.1"
license = "GPL-3.0"
repository = "https://github.com/ffplayout/ffplayout"
authors = ["Jonathan Baecker <jonbae77@gmail.com>"]
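With default-members set as above, a plain build from the workspace root covers the listed members; a release build is then just:

```Bash
# build the default workspace members with optimizations
cargo build --release
```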


@@ -1,41 +0,0 @@
[target.x86_64-unknown-linux-musl]
pre-build = [
"apt-get update",
"apt-get --assume-yes install curl",
"curl -fsSL https://deb.nodesource.com/setup_20.x | bash -",
"apt-get --assume-yes install nodejs"
]
[target.aarch64-unknown-linux-gnu]
pre-build = [
"apt-get update",
"apt-get --assume-yes install curl",
"curl -fsSL https://deb.nodesource.com/setup_20.x | bash -",
"apt-get --assume-yes install nodejs"
]
[target.x86_64-pc-windows-gnu]
pre-build = [
"apt-get update",
"apt-get --assume-yes install curl",
"curl -fsSL https://deb.nodesource.com/setup_20.x | bash -",
"apt-get --assume-yes install nodejs"
]
[target.x86_64-apple-darwin]
image = "ghcr.io/cross-rs/x86_64-apple-darwin-cross:local"
pre-build = [
"apt-get update",
"apt-get --assume-yes install curl",
"curl -fsSL https://deb.nodesource.com/setup_20.x | bash -",
"apt-get --assume-yes install nodejs"
]
[target.aarch64-apple-darwin]
image = "ghcr.io/cross-rs/aarch64-apple-darwin-cross:local"
pre-build = [
"apt-get update",
"apt-get --assume-yes install curl",
"curl -fsSL https://deb.nodesource.com/setup_20.x | bash -",
"apt-get --assume-yes install nodejs"
]
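This Cross.toml (removed in this compare) configured [cross](https://github.com/cross-rs/cross) build containers, installing Node.js in each so the frontend could be bundled during cross-compilation. A typical invocation for one of the listed targets would be something like:

```Bash
# sketch: cross-compile a static release binary inside the pre-configured
# container (assumes cross is installed, e.g. via `cargo install cross`)
cross build --release --target x86_64-unknown-linux-musl
```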

README.md (101 lines changed)

@@ -3,7 +3,7 @@
[![License: GPL v3](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0)
![player](/docs/images/player.png)
## **ffplayout-engine (ffplayout)**
[ffplayout](/ffplayout-engine/README.md) is a 24/7 broadcasting solution. It can playout a folder containing audio or video clips, or play a *JSON* playlist for each day, keeping the current playlist editable.
@@ -13,17 +13,17 @@ Check the [releases](https://github.com/ffplayout/ffplayout/releases/latest) for
### Features
- start program with [web based frontend](/frontend/), or run playout in foreground mode without frontend
- have all values in a separate config file
- dynamic playlist
- replace missing playlist or clip with single filler or multiple fillers from folder, if no filler exists, create dummy clip
- replace missing playlist or clip with a dummy clip
- playing clips in [watched](/docs/folder_mode.md) folder mode
- send emails with error message
- overlay a logo
- overlay text, controllable through [web frontend](/frontend/) (needs ffmpeg with libzmq and enabled JSON RPC server)
- overlay text, controllable through [ffplayout-frontend](https://github.com/ffplayout/ffplayout-frontend) (needs ffmpeg with libzmq and enabled JSON RPC server)
- loop playlist infinitely
- [remote source](/docs/remote_source.md)
- trim and fade the last clip, to get full 24 hours
- when playlist is not 24 hours long, loop fillers until time is full
- when playlist is not 24 hours long, loop filler clip until time is full
- set custom day start, so you can have a playlist, for example, from 6am to 6am, instead of 0am to 12pm
- normal system requirements and no special tools
- no GPU power is needed
@@ -42,27 +42,29 @@ Check the [releases](https://github.com/ffplayout/ffplayout/releases/latest) for
- **desktop**
- **HLS**
- **null** (for debugging)
- JSON RPC server, to get information about what is playing and to control it
- [live ingest](/docs/live_ingest.md)
- image source (will loop until out duration is reached)
- extra audio source, has priority over audio from video (experimental *)
- [multiple audio tracks](/docs/multi_audio.md) (experimental *)
- [Stream Copy](/docs/stream_copy.md) mode (experimental *)
- [custom filters](/docs/custom_filters.md) globally in config, or in playlist for specific clips
- import playlist from text or m3u file, with CLI or frontend
- audio only, for radio mode (experimental *)
- generate playlist based on [template](/docs/playlist_gen.md) (experimental *)
- During playlist import, all video clips are validated and, if desired, checked to ensure that the audio track is not completely muted.
- run multiple channels (experimental *)
- [Piggyback Mode](/ffplayout-api/README.md#piggyback-mode), mostly for non Linux systems (experimental *)
For preview stream, read: [/docs/preview_stream.md](/docs/preview_stream.md)
**\* Experimental features do not guarantee the same stability and may fail under unusual circumstances. Code and configuration options may change in the future.**
## **ffplayout-api (ffpapi)**
ffpapi serves the [frontend](https://github.com/ffplayout/ffplayout-frontend) and acts as a [REST API](/ffplayout-api/README.md) for controlling the engine, manipulating playlists, adding settings, etc.
### Requirements
- RAM and CPU depend on video resolution; minimum 4 _dedicated_ threads and 3GB RAM for 720p are recommended
- **ffmpeg** v5.0+ and **ffprobe** (**ffplay** if you want to play on desktop)
- if you want to overlay dynamic text, ffmpeg needs to have **libzmq**
- RAM and CPU depend on video resolution; minimum 4 threads and 3GB RAM for 720p are recommended
- **ffmpeg** v4.2+ and **ffprobe** (**ffplay** if you want to play on desktop)
- if you want to overlay text, ffmpeg needs to have **libzmq**
### Install
@@ -114,22 +116,81 @@ Check [install](docs/install.md) for details about how to install ffplayout.
]
}
```
If you are in playlist mode and move backwards or forwards in time, the time shift is saved so the playlist is still in sync. Bear in mind, however, that this may make your playlist too short. If you do not reset it, it will automatically reset the next day.
## **Warning**
(Endless) streaming over multiple days will only work if config has a **day_start** value and the **length** value is **24 hours**. If you only need a few hours for each day, use a *cron* job or something similar.
## Note
This project includes the DejaVu font, which are licensed under the [Bitstream Vera Fonts License](/assets/FONT_LICENSE.txt).
-----
## HLS output
For outputting to HLS, output parameters should look like:
```yaml
out:
...
output_param: >-
...
-flags +cgop
-f hls
-hls_time 6
-hls_list_size 600
-hls_flags append_list+delete_segments+omit_endlist+program_date_time
-hls_segment_filename /var/www/html/live/stream-%09d.ts /var/www/html/live/stream.m3u8
```
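A quick way to verify the HLS output is to probe the generated playlist with ffprobe; a minimal check, assuming the segment paths from the example above:

```Bash
# confirm the playlist is readable and describes a valid stream
ffprobe -v error -show_format /var/www/html/live/stream.m3u8
```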
-----
## Sponsoring
## JSON RPC
If you like this project and would like to make a donation, please use one of the options provided.
Please note that donations do not entitle you to support or features! Donations are only a sign of appreciation.
The ffplayout engine can run a simple RPC server. A request looks like:
### Backers
```Bash
curl -X POST -H "Content-Type: application/json" -H "Authorization: ---auth-key---" \
-d '{"control":"next"}' \
127.0.0.1:7070
```
[![](https://opencollective.com/ffplayout/backers.svg?width=800&button=true)](https://opencollective.com/ffplayout)
At the moment these commands are possible:
```Bash
'{"media":"current"}' # get infos about current clip
'{"media":"next"}' # get infos about next clip
'{"media":"last"}' # get infos about last clip
'{"control":"next"}' # jump to next clip
'{"control":"back"}' # jump to last clip
'{"control":"reset"}' # reset playlist to old state
'{"control":"text", \
"message": {"text": "Hello from ffplayout", "x": "(w-text_w)/2", "y": "(h-text_h)/2", \
"fontsize": 24, "line_spacing": 4, "fontcolor": "#ffffff", "box": 1, \
"boxcolor": "#000000", "boxborderw": 4, "alpha": 1.0}}' # send text to drawtext filter from ffmpeg
```
Output from `{"media":"current"}` shows:
```JSON
{
"jsonrpc": "2.0",
"result": {
"current_media": {
"category": "",
"duration": 154.2,
"out": 154.2,
"seek": 0.0,
"source": "/opt/tv-media/clip.mp4"
},
"index": 39,
"play_mode": "playlist",
"played_sec": 67.80771999300123,
"remaining_sec": 86.39228000699876,
"start_sec": 24713.631999999998,
"start_time": "06:51:53.631"
},
"id": 1
}
```
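For scripting, the RPC response can be piped through a JSON processor; a small sketch, assuming `jq` is installed and using the placeholder authorization key from above:

```Bash
# query the current clip and extract only its source path
curl -s -X POST -H "Content-Type: application/json" -H "Authorization: ---auth-key---" \
    -d '{"media":"current"}' 127.0.0.1:7070 | jq -r '.result.current_media.source'
```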
If you are in playlist mode and move backwards or forwards in time, the time shift is saved so the playlist is still in sync. Bear in mind, however, that this may make your playlist too short. If you do not reset it, it will automatically reset the next day.

assets/11-ffplayout (new file, 5 lines)

@@ -0,0 +1,5 @@
# give user ffpu permission to control the ffplayout systemd service
ffpu ALL = NOPASSWD: /usr/bin/systemctl start ffplayout.service, /usr/bin/systemctl stop ffplayout.service, /usr/bin/systemctl restart ffplayout.service, /usr/bin/systemctl status ffplayout.service, /usr/bin/systemctl is-active ffplayout.service, /usr/bin/systemctl enable ffplayout.service, /usr/bin/systemctl disable ffplayout.service
ffpu ALL = NOPASSWD: /usr/bin/systemctl start ffplayout@*, /usr/bin/systemctl stop ffplayout@*, /usr/bin/systemctl restart ffplayout@*, /usr/bin/systemctl status ffplayout@*, /usr/bin/systemctl is-active ffplayout@*, /usr/bin/systemctl enable ffplayout@*, /usr/bin/systemctl disable ffplayout@*
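With this sudoers drop-in in place, the unprivileged ffpu user can manage the playout services without a password prompt, for example (the instance name is only illustrative):

```Bash
# as user ffpu: restart the single-channel service
sudo systemctl restart ffplayout.service
# check an instantiated multi-channel unit
sudo systemctl is-active ffplayout@channel1
```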

Binary file not shown.


@@ -1,187 +0,0 @@
Fonts are (c) Bitstream (see below). DejaVu changes are in public domain.
Glyphs imported from Arev fonts are (c) Tavmjong Bah (see below)
Bitstream Vera Fonts Copyright
------------------------------
Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Bitstream Vera is
a trademark of Bitstream, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of the fonts accompanying this license ("Fonts") and associated
documentation files (the "Font Software"), to reproduce and distribute the
Font Software, including without limitation the rights to use, copy, merge,
publish, distribute, and/or sell copies of the Font Software, and to permit
persons to whom the Font Software is furnished to do so, subject to the
following conditions:
The above copyright and trademark notices and this permission notice shall
be included in all copies of one or more of the Font Software typefaces.
The Font Software may be modified, altered, or added to, and in particular
the designs of glyphs or characters in the Fonts may be modified and
additional glyphs or characters may be added to the Fonts, only if the fonts
are renamed to names not containing either the words "Bitstream" or the word
"Vera".
This License becomes null and void to the extent applicable to Fonts or Font
Software that has been modified and is distributed under the "Bitstream
Vera" names.
The Font Software may be sold as part of a larger software package but no
copy of one or more of the Font Software typefaces may be sold by itself.
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT,
TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL BITSTREAM OR THE GNOME
FOUNDATION BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING
ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE
FONT SOFTWARE.
Except as contained in this notice, the names of Gnome, the Gnome
Foundation, and Bitstream Inc., shall not be used in advertising or
otherwise to promote the sale, use or other dealings in this Font Software
without prior written authorization from the Gnome Foundation or Bitstream
Inc., respectively. For further information, contact: fonts at gnome dot
org.
Arev Fonts Copyright
------------------------------
Copyright (c) 2006 by Tavmjong Bah. All Rights Reserved.
Permission is hereby granted, free of charge, to any person obtaining
a copy of the fonts accompanying this license ("Fonts") and
associated documentation files (the "Font Software"), to reproduce
and distribute the modifications to the Bitstream Vera Font Software,
including without limitation the rights to use, copy, merge, publish,
distribute, and/or sell copies of the Font Software, and to permit
persons to whom the Font Software is furnished to do so, subject to
the following conditions:
The above copyright and trademark notices and this permission notice
shall be included in all copies of one or more of the Font Software
typefaces.
The Font Software may be modified, altered, or added to, and in
particular the designs of glyphs or characters in the Fonts may be
modified and additional glyphs or characters may be added to the
Fonts, only if the fonts are renamed to names not containing either
the words "Tavmjong Bah" or the word "Arev".
This License becomes null and void to the extent applicable to Fonts
or Font Software that has been modified and is distributed under the
"Tavmjong Bah Arev" names.
The Font Software may be sold as part of a larger software package but
no copy of one or more of the Font Software typefaces may be sold by
itself.
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL
TAVMJONG BAH BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
OTHER DEALINGS IN THE FONT SOFTWARE.
Except as contained in this notice, the name of Tavmjong Bah shall not
be used in advertising or otherwise to promote the sale, use or other
dealings in this Font Software without prior written authorization
from Tavmjong Bah. For further information, contact: tavmjong @ free
. fr.
TeX Gyre DJV Math
-----------------
Fonts are (c) Bitstream (see below). DejaVu changes are in public domain.
Math extensions done by B. Jackowski, P. Strzelczyk and P. Pianowski
(on behalf of TeX users groups) are in public domain.
Letters imported from Euler Fraktur from AMSfonts are (c) American
Mathematical Society (see below).
Bitstream Vera Fonts Copyright
Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Bitstream Vera
is a trademark of Bitstream, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of the fonts accompanying this license (“Fonts”) and associated
documentation
files (the “Font Software”), to reproduce and distribute the Font Software,
including without limitation the rights to use, copy, merge, publish,
distribute,
and/or sell copies of the Font Software, and to permit persons to whom
the Font Software is furnished to do so, subject to the following
conditions:
The above copyright and trademark notices and this permission notice
shall be
included in all copies of one or more of the Font Software typefaces.
The Font Software may be modified, altered, or added to, and in particular
the designs of glyphs or characters in the Fonts may be modified and
additional
glyphs or characters may be added to the Fonts, only if the fonts are
renamed
to names not containing either the words “Bitstream” or the word “Vera”.
This License becomes null and void to the extent applicable to Fonts or
Font Software
that has been modified and is distributed under the “Bitstream Vera”
names.
The Font Software may be sold as part of a larger software package but
no copy
of one or more of the Font Software typefaces may be sold by itself.
THE FONT SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT,
TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL BITSTREAM OR THE GNOME
FOUNDATION
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL,
SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN
ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR
INABILITY TO USE
THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.
Except as contained in this notice, the names of GNOME, the GNOME
Foundation,
and Bitstream Inc., shall not be used in advertising or otherwise to promote
the sale, use or other dealings in this Font Software without prior written
authorization from the GNOME Foundation or Bitstream Inc., respectively.
For further information, contact: fonts at gnome dot org.
AMSFonts (v. 2.2) copyright
The PostScript Type 1 implementation of the AMSFonts produced by and
previously distributed by Blue Sky Research and Y&Y, Inc. are now freely
available for general use. This has been accomplished through the
cooperation
of a consortium of scientific publishers with Blue Sky Research and Y&Y.
Members of this consortium include:
Elsevier Science IBM Corporation Society for Industrial and Applied
Mathematics (SIAM) Springer-Verlag American Mathematical Society (AMS)
In order to assure the authenticity of these fonts, copyright will be
held by
the American Mathematical Society. This is not meant to restrict in any way
the legitimate use of the fonts, such as (but not limited to) electronic
distribution of documents containing these fonts, inclusion of these fonts
into other public domain or commercial font collections or computer
applications, use of the outline data to create derivative fonts and/or
faces, etc. However, the AMS does require that the AMS copyright notice be
removed from any derivative versions of the fonts which have been altered in
any way. In addition, to ensure the fidelity of TeX documents using Computer
Modern fonts, Professor Donald Knuth, creator of the Computer Modern faces,
has requested that any alterations which yield different font metrics be
given a different name.
$Id$


@@ -1 +0,0 @@
WEBVTT

assets/ffpapi.service (new file, 12 lines)

@@ -0,0 +1,12 @@
[Unit]
Description=Rest API for ffplayout
After=network.target remote-fs.target
[Service]
ExecStart=/usr/bin/ffpapi -l 0.0.0.0:8787
Restart=always
RestartSec=1
User=ffpu
[Install]
WantedBy=multi-user.target
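Once installed, the unit is handled like any other systemd service, e.g.:

```Bash
# enable the REST API at boot and start it right away
sudo systemctl enable --now ffpapi.service
# follow its log output
journalctl -u ffpapi.service -f
```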


@@ -22,7 +22,7 @@ server {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_read_timeout 36000s;
proxy_connect_timeout 36000s;
proxy_connect_timeout 36000s;
proxy_send_timeout 36000s;
proxy_buffer_size 128k;
proxy_buffers 4 256k;
@@ -31,16 +31,6 @@ server {
proxy_pass http://127.0.0.1:8787;
}
location /data {
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Connection "";
proxy_http_version 1.1;
proxy_pass http://127.0.0.1:8787/data;
}
location /live/ {
alias /usr/share/ffplayout/public/live/;
}
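After changing the vhost, validate and reload nginx so the new proxy rules take effect; the usual sequence:

```Bash
# check the configuration for syntax errors, then reload without downtime
sudo nginx -t && sudo systemctl reload nginx
```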


@@ -3,7 +3,7 @@ Description=Rust and ffmpeg based playout solution
After=network.target remote-fs.target
[Service]
ExecStart=/usr/bin/ffplayout -l 0.0.0.0:8787
ExecStart=/usr/bin/ffplayout
Restart=always
StartLimitInterval=20
RestartSec=1

assets/ffplayout.yml (new file, 145 lines)

@@ -0,0 +1,145 @@
general:
help_text: Sometimes it can happen that a file is corrupt but still playable;
this can produce a streaming error over all following files. The only way
out in this case is to stop ffplayout and start it again. Here we only handle
the stopping; the starting process is in your hands. The best way is a systemd
service on Linux. 'stop_threshold' stops ffplayout if it is out of sync in time
above this value. A number below 3 can cause unexpected errors.
stop_threshold: 11
stat_file: .ffp_status
rpc_server:
help_text: Run a JSON RPC server, for getting infos about current playing and
for some control functions.
enable: true
address: 127.0.0.1:7070
authorization: av2Kx8g67lF9qj5wEH3ym1bI4cCs
mail:
help_text: Send error messages to an email address, for example on missing playlist, invalid
json format or missing clip path. Leave recipient blank if you don't need this.
'mail_level' can be INFO, WARNING or ERROR. 'interval' means seconds until
a new mail will be sent.
subject: Playout Error
smtp_server: mail.example.org
starttls: true
sender_addr: ffplayout@example.org
sender_pass: "abc123"
recipient:
mail_level: ERROR
interval: 30
logging:
help_text: If 'log_to_file' is true, log to file; when false, log to console.
'backup_count' says how many days log files will be kept. Setting 'local_time' to
false will set log timestamps to UTC. Use a path in /var/log/ only if you run this
program as a daemon. 'level' can be DEBUG, INFO, WARNING, ERROR.
'ffmpeg_level' can be info, warning, error.
log_to_file: true
backup_count: 7
local_time: true
timestamp: true
path: /var/log/ffplayout/
level: DEBUG
ffmpeg_level: error
ingest_level: warning
processing:
help_text: Default processing for all clips, to have them uniform. Mode can be playlist
or folder. 'aspect' must be a float number. 'logo' is only used if the path exists.
'logo_scale' scales the logo to the target size; leave it blank when no scaling
is needed. The format is 'width:height', for example '100:-1' for proportional
scaling. With 'logo_opacity' the logo can become transparent. With 'audio_tracks' it
is possible to configure how many audio tracks should be processed. 'audio_channels'
can be used if the audio has more channels than stereo. With 'logo_filter'
'overlay=W-w-12:12' you can modify the logo position. With 'custom_filter'
it is possible to apply further filters. The filter outputs should end with
[c_v_out] for video filters, and [c_a_out] for audio filters.
mode: playlist
audio_only: false
width: 1024
height: 576
aspect: 1.778
fps: 25
add_logo: true
logo: /usr/share/ffplayout/logo.png
logo_scale:
logo_opacity: 0.7
logo_filter: overlay=W-w-12:12
audio_tracks: 1
audio_channels: 2
volume: 1
custom_filter:
ingest:
help_text: Run a server for an ingest stream. This stream will override the normal streaming
until it is done. There is only a very simple authentication mechanism, which checks if the
stream name is correct. 'custom_filter' can be used in the same way as the one in the
processing section.
enable: false
input_param: -f live_flv -listen 1 -i rtmp://127.0.0.1:1936/live/stream
custom_filter:
playlist:
help_text: >
'path' can be a path to a single file, or a directory. For a directory put
only the root folder, for example '/playlists'; subdirectories are read by the
program. Subdirectories need this structure '/playlists/2018/01'. 'day_start'
means at which time the playlist should start; leave day_start blank when the
playlist should always start at the beginning. 'length' represents the target
length of the playlist; when blank, the real length is not considered.
'infinit: true' works with a single playlist file and loops it infinitely.
path: /var/lib/ffplayout/playlists
day_start: "5:59:25"
length: "24:00:00"
infinit: false
storage:
help_text: Play files from path in sorted or random order. 'filler_clip' is used to fill
the end to reach 24 hours; it will loop when necessary. 'extensions' restricts the
search to files with these extensions. Set 'shuffle' to 'true' to pick files randomly.
path: "/var/lib/ffplayout/tv-media"
filler_clip: "/var/lib/ffplayout/tv-media/filler/filler.mp4"
extensions:
- "mp4"
- "mkv"
shuffle: true
text:
help_text: Overlay text in combination with libzmq for remote text manipulation.
On Windows the fontfile path needs to look like this: 'C\:/WINDOWS/fonts/DejaVuSans.ttf'.
'text_from_filename' activates the extraction of text from a filename. With 'style'
you can define the drawtext parameters like position, color, etc. Text posted over the
API will override this. With 'regex' you can format file names, to get a title from them.
add_text: true
text_from_filename: false
fontfile: "/usr/share/fonts/truetype/dejavu/DejaVuSans.ttf"
style: "x=(w-tw)/2:y=(h-line_h)*0.9:fontsize=24:fontcolor=#ffffff:box=1:boxcolor=#000000:boxborderw=4"
regex: ^.+[/\\](.*)(.mp4|.mkv)$
out:
help_text: The final playout compression. Adjust the settings to your needs. 'mode'
has the options 'desktop', 'hls', 'null', 'stream'. Use 'stream' and adjust the
'output_param:' settings when you want to stream to an rtmp/rtsp/srt/... server.
In production don't serve the HLS playlist with ffpapi; use nginx or another web server!
mode: hls
output_param: >-
-c:v libx264
-crf 23
-x264-params keyint=50:min-keyint=25:scenecut=-1
-maxrate 1300k
-bufsize 2600k
-preset faster
-tune zerolatency
-profile:v Main
-level 3.1
-c:a aac
-ar 44100
-b:a 128k
-flags +cgop
-f hls
-hls_time 6
-hls_list_size 600
-hls_flags append_list+delete_segments+omit_endlist
-hls_segment_filename /usr/share/ffplayout/public/live/stream-%d.ts
/usr/share/ffplayout/public/live/stream.m3u8
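With this file installed, the engine can be pointed at it explicitly; a sketch, assuming the config lands in /etc/ffplayout/ as the packaging above suggests and that this release accepts a `-c/--config` flag:

```Bash
# run the engine in the foreground with a specific configuration
ffplayout -c /etc/ffplayout/ffplayout.yml
```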

assets/ffplayout@.service (new file, 14 lines)

@@ -0,0 +1,14 @@
[Unit]
Description=Rust and ffmpeg based multi channel playout solution
After=network.target remote-fs.target
[Service]
ExecStart=/usr/bin/ffplayout %I
Restart=always
StartLimitInterval=20
RestartSec=1
KillMode=mixed
User=ffpu
[Install]
WantedBy=multi-user.target
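Because the unit is templated, each channel runs as its own instance and the instance name is handed to the binary via `%I`; for example (channel names are only illustrative):

```Bash
# start two independent playout channels
sudo systemctl start ffplayout@channel1
sudo systemctl start ffplayout@channel2
```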

debian/postinst vendored (8 lines changed)

@@ -13,12 +13,18 @@ fi
if [ ! -d "/usr/share/ffplayout/db" ]; then
mkdir "/usr/share/ffplayout/db"
mkdir -p "/usr/share/ffplayout/public/live"
mkdir -p "/var/lib/ffplayout/playlists"
mkdir -p "/var/lib/ffplayout/tv-media"
IP=$(hostname -I | cut -d ' ' -f1)
/usr/bin/ffpapi -i -d "${IP}:8787"
chown -R ${sysUser}: "/usr/share/ffplayout"
chown -R ${sysUser}: "/var/lib/ffplayout"
chown -R ${sysUser}: "/etc/ffplayout"
ln -s "/var/lib/ffplayout/tv-media" "/usr/share/ffplayout/public/"
fi
if [ ! -d "/var/log/ffplayout" ]; then

debian/postrm vendored (2 lines changed)

@@ -6,7 +6,7 @@ sysUser="ffpu"
case "$1" in
abort-install|purge)
deluser $sysUser
rm -rf /usr/share/ffplayout /var/log/ffplayout /var/lib/ffplayout /home/$sysUser
rm -rf /usr/share/ffplayout /var/log/ffplayout /etc/ffplayout /var/lib/ffplayout /home/$sysUser
;;
remove)


@@ -1,38 +1,43 @@
FROM alpine:latest
FROM almalinux:9 AS base
ARG FFPLAYOUT_VERSION=0.24.0-rc3
ARG SHARED_STORAGE=false
ENV container docker
ENV DB=/db
ENV SHARED_STORAGE=${SHARED_STORAGE}
RUN (cd /lib/systemd/system/sysinit.target.wants/; for i in *; do [ $i == \
systemd-tmpfiles-setup.service ] || rm -f $i; done); \
rm -f /lib/systemd/system/multi-user.target.wants/*; \
rm -f /etc/systemd/system/*.wants/*; \
rm -f /lib/systemd/system/local-fs.target.wants/*; \
rm -f /lib/systemd/system/sockets.target.wants/*udev*; \
rm -f /lib/systemd/system/sockets.target.wants/*initctl*; \
rm -f /lib/systemd/system/basic.target.wants/*; \
rm -f /lib/systemd/system/anaconda.target.wants/*
COPY README.md ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.* /tmp/
FROM base
COPY <<-EOT /run.sh
#!/bin/sh
ARG FFPLAYOUT_VERSION=0.19.0
COPY README.md *.rpm /tmp/
if [ ! -f /db/ffplayout.db ]; then
ffplayout -i -u admin -p admin -m contact@example.com --storage "/tv-media" --playlists "/playlists" --public "/public" --logs "/logging" --mail-smtp "mail.example.org" --mail-user "admin@example.org" --mail-password "" --mail-starttls
fi
RUN dnf update -y && \
dnf install -y epel-release && \
dnf install -y 'dnf-command(config-manager)' && \
dnf config-manager --set-enabled crb && \
dnf install -y --nogpgcheck https://mirrors.rpmfusion.org/free/el/rpmfusion-free-release-$(rpm -E %rhel).noarch.rpm && \
dnf install -y --nogpgcheck https://mirrors.rpmfusion.org/nonfree/el/rpmfusion-nonfree-release-$(rpm -E %rhel).noarch.rpm && \
dnf install -y ffmpeg ffmpeg-devel wget dejavu-sans-fonts sudo && \
dnf clean all
/usr/bin/ffplayout -l "0.0.0.0:8787"
EOT
RUN apk update && \
apk upgrade && \
apk add --no-cache ffmpeg sqlite font-dejavu && \
chmod +x /run.sh
RUN [[ -f "/tmp/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" ]] || \
wget -q "https://github.com/ffplayout/ffplayout/releases/download/v${FFPLAYOUT_VERSION}/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" -P /tmp/ && \
cd /tmp && \
tar xf "ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" && \
cp ffplayout /usr/bin/ && \
mkdir -p /usr/share/ffplayout/ && \
cp assets/dummy.vtt assets/logo.png assets/DejaVuSans.ttf assets/FONT_LICENSE.txt /usr/share/ffplayout/ && \
rm -rf /tmp/* && \
mkdir ${DB}
RUN [[ -f /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm ]] || wget -q "https://github.com/ffplayout/ffplayout/releases/download/v${FFPLAYOUT_VERSION}/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm" -P /tmp/ && \
dnf install -y /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm && \
rm /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm && \
sed -i "s/User=ffpu/User=root/g" /usr/lib/systemd/system/ffpapi.service && \
sed -i "s/User=ffpu/User=root/g" /usr/lib/systemd/system/ffplayout.service && \
sed -i "s/User=ffpu/User=root/g" /usr/lib/systemd/system/ffplayout@.service && \
systemctl enable ffplayout && \
systemctl enable ffpapi && \
ffpapi -u admin -p admin -m contact@example.com
EXPOSE 8787
CMD ["/run.sh"]
VOLUME [ "/sys/fs/cgroup" ]
CMD ["/usr/sbin/init"]


@@ -1,17 +1,29 @@
# Run ffplayout in container
## Base Image
The [CentOS image](https://hub.docker.com/_/centos) is used as the base image, as it offers the possibility to use systemd.
In order to run systemd in a container it has to run in privileged mode and bind to the `cgroup` of the host.
## Image
In addition to the base image, ffmpeg and all its libs are compiled from source, based on https://github.com/jrottenberg/ffmpeg.
We can't directly use the image from `jrottenberg/ffmpeg`, as it compiles ffmpeg with the flag `--enable-small`, which removes part of the JSON output of the ffprobe command.
The image is built with a default user/pass `admin/admin`.
You can take a look at the [Dockerfile](Dockerfile)
You can take a look à the [Dockerfile](Dockerfile)
### /!\ as ffmpeg is compiled with `--enable-nonfree` don't push it to a public registry nor distribute the image /!\
## Storage
There are some folders/files that are important for ffplayout to work well, such as:
- **/usr/share/ffplayout/db** => where all the data are stored (user/pass etc)
- **/usr/share/ffplayout/db** => where all the data for the `ffpapi` are stored (user/pass etc)
- **/var/lib/ffplayout/tv-media** => where the media are stored by default (configurable)
- **/var/lib/ffplayout/playlists** => where playlists are stored (configurable)
- **/etc/ffplayout/ffplayout.yml** => the core config file
It may be useful to create/link volume for those folders/files.
@@ -22,39 +34,24 @@ How to build the image:\
# build default
docker build -t ffplayout-image .
# build from root folder, to copy *.tar.gz with self compiled binary
docker build -f docker/Dockerfile -t ffplayout-image .
# build from root folder, to copy local *.rpm package
docker build -f docker/Dockerfile -t ffplayout-image:alma .
# build ffmpeg from source
docker build -f ffmpeg.Dockerfile -t ffmpeg-build .
docker build -f nonfree.Dockerfile -t ffplayout-image:nonfree .
docker build -f fromSource.Dockerfile -t ffplayout-image:from-source .
# build with nvidia image for hardware support
docker build -f nvidia.Dockerfile -t ffplayout-image:nvidia .
# build with current almalinux image
docker build -f Almalinux.Dockerfile -t ffplayout-image:almalinux .
```
Example command to start the container:
```BASH
docker run -it -v /path/to/db:/db -v /path/to/storage:/tv-media -v /path/to/playlists:/playlists -v /path/to/public:/public -v /path/to/logging:/logging --name ffplayout -p 8787:8787 ffplayout-image
# run in daemon mode
docker run -d --name ffplayout -p 8787:8787 ffplayout-image
# run with docker-compose
docker-compose up -d
```
To set up the mail server settings, run:
docker run -ti --name ffplayout -v /sys/fs/cgroup:/sys/fs/cgroup:ro --cap-add SYS_ADMIN -p 8787:8787 ffplayout-image
```
docker exec -it ffplayout ffplayout -i
```
Then restart the container.
#### Note from CentOS docker hub page
There have been reports that if you're using an Ubuntu host, you will need to add `-v /tmp/$(mktemp -d):/run` to the mount.
There have been reports that if you're using an Ubuntu host, you will need to add `-v /tmp/$(mktemp -d):/run` in addition to the cgroups mount.
## Kubernetes


@@ -2,15 +2,12 @@ version: '3'
services:
ffplayout:
cap_add:
- SYS_ADMIN
container_name: ffplayout
build:
context: .
dockerfile: ./Dockerfile
volumes:
- ./data/db:/db
- ./data/storage:/tv-media
- ./data/playlists:/playlists
- ./data/logging:/logging
- ./data/public:/public
ports:
- '8787:8787'
- '8787'
volumes:
- /sys/fs/cgroup:/sys/fs/cgroup


@@ -1,158 +0,0 @@
FROM alpine:latest as builder
ENV EXTRA_CFLAGS=-march=generic \
LOCALBUILDDIR=/tmp/build \
LOCALDESTDIR=/tmp/local \
PKG_CONFIG="pkg-config --static" \
PKG_CONFIG_PATH=/tmp/local/lib/pkgconfig \
CPPFLAGS="-I/tmp/local/include -O3 -fno-strict-overflow -fstack-protector-all -fPIC" \
CFLAGS="-I/tmp/local/include -O3 -fno-strict-overflow -fstack-protector-all -fPIC" \
CXXFLAGS="-I/tmp/local/include -O2 -fPIC" \
LDFLAGS="-L/tmp/local/lib -pipe -Wl,-z,relro,-z,now -static" \
CC=clang
RUN apk add --no-cache \
clang \
glib-dev glib-static \
coreutils \
autoconf \
automake \
build-base \
cmake \
git \
libtool \
nasm \
pkgconfig \
yasm \
wget \
curl \
ninja-build \
meson \
cargo cargo-c \
diffutils \
bash
RUN apk add --no-cache \
zlib-dev zlib-static \
bzip2-dev bzip2-static \
expat-dev expat-static \
libxml2-dev libxml2-static \
fontconfig-dev fontconfig-static \
freetype freetype-dev freetype-static \
fribidi-dev fribidi-static \
harfbuzz-dev harfbuzz-static \
graphite2-static \
numactl-dev \
brotli-dev brotli-static \
soxr-dev soxr-static \
libjpeg-turbo libjpeg-turbo-dev \
libpng-dev libpng-static \
xvidcore-dev xvidcore-static \
libsodium-dev libsodium-static \
zeromq-dev libzmq-static \
openssl-dev openssl-libs-static
WORKDIR /tmp
RUN git clone --depth 1 "https://github.com/libass/libass.git" && cd libass && \
./autogen.sh && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://github.com/mstorsjo/fdk-aac" && cd fdk-aac && \
./autogen.sh && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
make -j $(nproc) && \
make install
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "lame-3.100.tar.gz" "https://downloads.sourceforge.net/project/lame/lame/3.100/lame-3.100.tar.gz" && \
tar xf "lame-3.100.tar.gz" && \
cd "lame-3.100" && \
./configure --prefix="$LOCALDESTDIR" --enable-expopt=full --enable-shared=no && \
make -j $(nproc) && \
make install
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "opus-1.4.tar.gz" "https://ftp.osuosl.org/pub/xiph/releases/opus/opus-1.4.tar.gz" && \
tar xf "opus-1.4.tar.gz" && \
cd "opus-1.4" && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no --enable-static --disable-doc && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://github.com/Haivision/srt.git" && cd srt && \
mkdir build && \
cd build && \
cmake .. -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DOPENSSL_USE_STATIC_LIBS=ON -DUSE_STATIC_LIBSTDCXX:BOOLEAN=ON -DENABLE_CXX11:BOOLEAN=ON -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" && \
make -j $(nproc) && \
make install
RUN git clone "https://github.com/webmproject/libvpx.git" && cd libvpx && \
./configure --prefix="$LOCALDESTDIR" --disable-shared --enable-static --disable-unit-tests --disable-docs --enable-postproc --enable-vp9-postproc --enable-runtime-cpu-detect && \
make -j $(nproc) && \
make install
RUN git clone "https://code.videolan.org/videolan/x264" && cd x264 && \
./configure --prefix="$LOCALDESTDIR" --enable-static && \
make -j $(nproc) && \
make install
RUN git clone "https://bitbucket.org/multicoreware/x265_git.git" && cd x265_git/build && \
cmake ../source -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DCMAKE_CXX_FLAGS_RELEASE:STRING="-O3 -DNDEBUG $CXXFLAGS" && \
make -j $(nproc) && \
make install
RUN git clone "https://github.com/xiph/rav1e.git" && cd rav1e && \
RUSTFLAGS="-C target-feature=+crt-static" cargo cinstall --release --jobs $(nproc) --prefix=$LOCALDESTDIR --libdir=$LOCALDESTDIR/lib --includedir=$LOCALDESTDIR/include
RUN git clone --depth 1 "https://gitlab.com/AOMediaCodec/SVT-AV1.git" && cd SVT-AV1/Build && \
cmake .. -G"Unix Makefiles" -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://code.videolan.org/videolan/dav1d.git" && cd dav1d && \
mkdir build && cd build && \
meson setup -Denable_tools=false -Denable_tests=false --default-library=static .. --prefix "$LOCALDESTDIR" --libdir="$LOCALDESTDIR/lib" && \
ninja && \
ninja install
RUN git clone --depth 1 https://git.ffmpeg.org/ffmpeg.git && cd ffmpeg && \
sed -i 's/add_ldexeflags -fPIE -pie/add_ldexeflags -fPIE -static-pie/' configure && \
./configure \
--pkg-config-flags=--static \
--extra-cflags="-fopenmp -DZMG_STATIC" \
--extra-ldflags="-fopenmp -Wl,--copy-dt-needed-entries -Wl,--allow-multiple-definition" \
--enable-runtime-cpudetect \
--prefix=/usr/local \
--disable-debug \
--disable-doc \
--disable-ffplay \
--disable-shared \
--enable-gpl \
--enable-version3 \
--enable-nonfree \
--enable-small \
--enable-static \
--enable-libass \
--enable-fontconfig \
--enable-libfdk-aac \
--enable-libfribidi \
--enable-libfreetype \
--enable-libharfbuzz \
--enable-libmp3lame \
--enable-libopus \
--enable-libsoxr \
--enable-libsrt \
--enable-libvpx \
--enable-libx264 \
--enable-libx265 \
--enable-libzmq \
--enable-nonfree \
--enable-openssl \
--enable-libsvtav1 \
--enable-librav1e \
--enable-libdav1d \
--enable-libxvid && \
make -j $(nproc) && \
make install
RUN strip /usr/local/bin/ffmpeg /usr/local/bin/ffprobe


@@ -0,0 +1,165 @@
FROM almalinux:9 AS base
ENV container docker
RUN (cd /lib/systemd/system/sysinit.target.wants/; for i in *; do [ $i == \
systemd-tmpfiles-setup.service ] || rm -f $i; done); \
rm -f /lib/systemd/system/multi-user.target.wants/*; \
rm -f /etc/systemd/system/*.wants/*; \
rm -f /lib/systemd/system/local-fs.target.wants/*; \
rm -f /lib/systemd/system/sockets.target.wants/*udev*; \
rm -f /lib/systemd/system/sockets.target.wants/*initctl*; \
rm -f /lib/systemd/system/basic.target.wants/*; \
rm -f /lib/systemd/system/anaconda.target.wants/*
FROM base AS build
WORKDIR /tmp/workdir
ENV SRC=/usr/local \
BUILD=/tmp/build
ARG LD_LIBRARY_PATH=/opt/ffmpeg/lib
ARG PKG_CONFIG_PATH="/opt/ffmpeg/share/pkgconfig:/opt/ffmpeg/lib/pkgconfig:/opt/ffmpeg/lib64/pkgconfig:/lib64/pkgconfig"
ARG LOCALDESTDIR=/opt/ffmpeg
ARG LD_LIBRARY_PATH="/opt/ffmpeg/lib:/opt/ffmpeg/lib64"
RUN \
buildDeps="bzip2 gperf which libticonv autoconf automake cmake diffutils file gcc \
ninja-build wget nasm gcc-c++ git libtool make perl yasm meson x264-devel zlib-devel \
expat-devel fontconfig-devel libxml2-devel lame-devel libpng-devel numactl-devel \
fribidi-devel zeromq-devel freetype-devel opus-devel libass-devel openssl-devel" && \
echo "${SRC}/lib" > /etc/ld.so.conf.d/libc.conf && \
dnf install -y epel-release && \
dnf install -y 'dnf-command(config-manager)' && \
dnf config-manager --set-enabled crb && \
dnf install -y --nogpgcheck https://mirrors.rpmfusion.org/free/el/rpmfusion-free-release-$(rpm -E %rhel).noarch.rpm && \
dnf install -y --nogpgcheck https://mirrors.rpmfusion.org/nonfree/el/rpmfusion-nonfree-release-$(rpm -E %rhel).noarch.rpm && \
dnf install -y ${buildDeps} && \
mkdir -p ${BUILD}
RUN \
cd ${BUILD} && \
git clone --depth 1 "https://github.com/Haivision/srt.git" && \
cd srt && \
mkdir build && \
cd build && \
cmake .. -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DUSE_STATIC_LIBSTDCXX:BOOLEAN=ON \
-DENABLE_CXX11:BOOLEAN=OFF -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" && \
make -j $(nproc | awk '{print $1 / 2}') && \
make install
RUN \
cd ${BUILD} && \
git clone --depth 1 "https://code.videolan.org/rist/librist.git" && \
cd librist && \
mkdir build && \
cd build && \
meson setup --default-library=static --prefix "$LOCALDESTDIR" --libdir="$LOCALDESTDIR/lib" .. && \
ninja && \
ninja install
RUN \
cd ${BUILD} && \
git clone --depth 1 "https://github.com/mstorsjo/fdk-aac" && \
cd fdk-aac && \
./autogen.sh && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
make -j $(nproc | awk '{print $1 / 2}') && \
make install
RUN \
cd ${BUILD} && \
git clone --depth 1 "https://gitlab.com/AOMediaCodec/SVT-AV1.git" && \
cd SVT-AV1/Build && \
rm -rf * && \
cmake .. -G"Unix Makefiles" -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DCMAKE_BUILD_TYPE=Release \
-DBUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" && \
make -j $(nproc | awk '{print $1 / 2}') && \
make install
RUN \
cd ${BUILD} && \
git clone --depth 1 "https://code.videolan.org/videolan/dav1d.git" && \
cd dav1d && \
mkdir build && \
cd build && \
meson setup -Denable_tools=false -Denable_tests=false --default-library=static .. --prefix "$LOCALDESTDIR" --libdir="$LOCALDESTDIR/lib" && \
ninja && \
ninja install
RUN \
cd ${BUILD} && \
git clone "https://github.com/webmproject/libvpx.git" && \
cd libvpx && \
./configure --prefix="$LOCALDESTDIR" --disable-shared --enable-static --disable-unit-tests --disable-docs --enable-postproc --enable-vp9-postproc --enable-runtime-cpu-detect && \
make -j $(nproc | awk '{print $1 / 2}') && \
make install
RUN \
cd ${BUILD} && \
git clone "https://bitbucket.org/multicoreware/x265_git.git" x265 && \
cd x265/build && \
rm -rf * && \
cmake ../source -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DCMAKE_CXX_FLAGS_RELEASE:STRING="-O3 -DNDEBUG" && \
make -j $(nproc | awk '{print $1 / 2}') && \
make install
RUN \
cd ${BUILD} && \
wget "https://ffmpeg.org/releases/ffmpeg-snapshot.tar.bz2" && \
tar xfvj ffmpeg-snapshot.tar.bz2 && \
rm -rf ffmpeg-snapshot.tar.bz2 && \
cd ffmpeg && \
./configure --prefix="$LOCALDESTDIR" --enable-pthreads --extra-libs=-lpthread \
--disable-debug --disable-shared --disable-doc --enable-gpl --enable-version3 --pkg-config-flags=--static \
--enable-nonfree --enable-runtime-cpudetect --enable-fontconfig \
--enable-openssl --enable-libass --enable-libfdk-aac --enable-libfreetype \
--enable-libfribidi --enable-libmp3lame --enable-libopus --enable-libvpx --enable-librist \
--enable-libsrt --enable-libx264 --enable-libx265 --enable-libzmq --enable-libsvtav1 --enable-libdav1d && \
make -j $(nproc | awk '{print $1 / 2}') && \
make install
RUN \
cd / && \
cp /opt/ffmpeg/bin/ff* /usr/local/bin/ && \
rm -rf $BUILD $LOCALDESTDIR && \
dnf -y remove autoconf automake cmake diffutils file gcc ninja-build nasm gcc-c++ git libtool make perl yasm meson \
x264-devel zlib-devel expat-devel fontconfig-devel libxml2-devel lame-devel libpng-devel numactl-devel \
fribidi-devel zeromq-devel freetype-devel opus-devel libass-devel openssl-devel && \
dnf autoremove -y && \
dnf clean all
FROM base
ARG FFPLAYOUT_VERSION=0.18.3
ENV LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib
COPY --from=build /usr/local/ /usr/local/
ADD ./overide.conf /etc/systemd/system/ffplayout.service.d/overide.conf
ADD ./overide.conf /etc/systemd/system/ffpapi.service.d/overide.conf
RUN \
dnf update -y && \
dnf install -y epel-release && \
dnf install -y 'dnf-command(config-manager)' && \
dnf config-manager --set-enabled crb && \
dnf install -y --nogpgcheck https://mirrors.rpmfusion.org/free/el/rpmfusion-free-release-$(rpm -E %rhel).noarch.rpm && \
dnf install -y --nogpgcheck https://mirrors.rpmfusion.org/nonfree/el/rpmfusion-nonfree-release-$(rpm -E %rhel).noarch.rpm && \
dnf install -y wget dejavu-sans-fonts sudo x264-libs fontconfig lame libpng numactl fribidi zeromq freetype opus libass && \
wget -q -O /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm "https://github.com/ffplayout/ffplayout/releases/download/v${FFPLAYOUT_VERSION}/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm" && \
dnf install -y /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm && \
dnf clean all && \
rm /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm && \
mkdir -p /home/ffpu && chown -R ffpu: /home/ffpu && \
systemctl enable ffplayout && \
systemctl enable ffpapi && \
ffpapi -u admin -p admin -m contact@example.com
EXPOSE 8787
VOLUME [ "/sys/fs/cgroup" ]
CMD ["/usr/sbin/init"]

View File

@ -1,40 +0,0 @@
FROM alpine:latest
ARG FFPLAYOUT_VERSION=0.24.0-rc3
ARG SHARED_STORAGE=false
ENV DB=/db
ENV SHARED_STORAGE=${SHARED_STORAGE}
COPY --from=ffmpeg-build /usr/local/bin/ffmpeg /usr/local/bin/ffmpeg
COPY --from=ffmpeg-build /usr/local/bin/ffprobe /usr/local/bin/ffprobe
COPY README.md ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.* /tmp/
COPY <<-EOT /run.sh
#!/bin/sh
if [ ! -f /db/ffplayout.db ]; then
ffplayout -i -u admin -p admin -m contact@example.com --storage "/tv-media" --playlists "/playlists" --public "/public" --logs "/logging" --mail-smtp "mail.example.org" --mail-user "admin@example.org" --mail-password "" --mail-starttls
fi
/usr/bin/ffplayout -l "0.0.0.0:8787"
EOT
RUN apk update && \
apk upgrade && \
apk add --no-cache sqlite font-dejavu && \
chmod +x /run.sh
RUN [[ -f "/tmp/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" ]] || \
wget -q "https://github.com/ffplayout/ffplayout/releases/download/v${FFPLAYOUT_VERSION}/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" -P /tmp/ && \
cd /tmp && \
tar xf "ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" && \
cp ffplayout /usr/bin/ && \
mkdir -p /usr/share/ffplayout/ && \
cp assets/dummy.vtt assets/logo.png assets/DejaVuSans.ttf assets/FONT_LICENSE.txt /usr/share/ffplayout/ && \
rm -rf /tmp/* && \
mkdir ${DB}
EXPOSE 8787
CMD ["/run.sh"]

View File

@ -0,0 +1,112 @@
FROM nvidia/cuda:12.0.1-cudnn8-runtime-centos7
ENV NVIDIA_VISIBLE_DEVICES all
ENV NVIDIA_DRIVER_CAPABILITIES compute,video,utility
ENV NVCODEC_VERSION 8.2.15.6
ENV FFMPEG_VERSION 5.1.2
ENV X264_VERSION=20191217-2245
ENV NASM_VERSION=2.14.02
ENV FDKAAC_VERSION=0.1.5
RUN yum install -y wget
RUN buildDeps="autoconf \
automake \
bzip2 \
cmake3 \
diffutils \
expat-devel \
file \
gcc \
gcc-c++ \
git \
gperf \
libtool \
make \
perl \
python3 \
openssl-devel \
tar \
yasm \
which \
zlib-devel" && \
echo "${SRC}/lib" > /etc/ld.so.conf.d/libc.conf && \
yum --enablerepo=extras install -y epel-release && \
yum --enablerepo=epel install -y ${buildDeps} && \
alternatives --install /usr/bin/cmake cmake /usr/bin/cmake3 0 && \
# Install the tools required to build nasm 2.14.02 \
nasmDeps="asciidoc \
perl-Font-TTF \
perl-Sort-Versions \
xmlto" && \
yum --enablerepo=epel install -y ${nasmDeps}
RUN curl -fsSLO https://www.nasm.us/pub/nasm/releasebuilds/$NASM_VERSION/nasm-$NASM_VERSION.tar.bz2 \
&& tar -xjf nasm-$NASM_VERSION.tar.bz2 \
&& cd nasm-$NASM_VERSION \
&& ./autogen.sh \
&& ./configure \
&& make -j$(nproc) \
&& make install
RUN \
DIR=/tmp/x264 && \
mkdir -p ${DIR} && \
cd ${DIR} && yum install -y wget && \
wget https://download.videolan.org/pub/videolan/x264/snapshots/x264-snapshot-${X264_VERSION}.tar.bz2 && \
tar -xjf x264-snapshot-${X264_VERSION}.tar.bz2 && cd x264-snapshot-${X264_VERSION} && \
./configure --enable-shared --enable-pic --disable-cli && \
make -j $(nproc | awk '{print $1 / 2}') && \
make install
### fdk-aac https://github.com/mstorsjo/fdk-aac
RUN \
DIR=/tmp/fdk-aac && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://github.com/mstorsjo/fdk-aac/archive/v${FDKAAC_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
autoreconf -fiv && \
./configure --enable-shared --datadir="${DIR}" && \
make -j $(nproc | awk '{print $1 / 2}') && \
make install && \
rm -rf ${DIR}
RUN git clone --depth 1 https://git.videolan.org/git/ffmpeg/nv-codec-headers \
&& cd nv-codec-headers \
&& make install
ENV PKG_CONFIG_PATH /usr/local/lib/pkgconfig
RUN curl -fsSLO https://ffmpeg.org/releases/ffmpeg-$FFMPEG_VERSION.tar.bz2 \
&& tar -xjf ffmpeg-$FFMPEG_VERSION.tar.bz2 \
&& cd ffmpeg-$FFMPEG_VERSION \
&& ./configure --enable-nvenc --enable-libx264 --enable-gpl --enable-libfdk_aac --enable-nonfree --enable-postproc --enable-shared --enable-version3 \
&& make -j$(nproc) \
&& make install
RUN yum -y install systemd vim pico; yum clean all; \
(cd /lib/systemd/system/sysinit.target.wants/; for i in *; do [ $i == systemd-tmpfiles-setup.service ] || rm -f $i; done); \
rm -f /lib/systemd/system/multi-user.target.wants/*;\
rm -f /etc/systemd/system/*.wants/*;\
rm -f /lib/systemd/system/local-fs.target.wants/*; \
rm -f /lib/systemd/system/sockets.target.wants/*udev*; \
rm -f /lib/systemd/system/sockets.target.wants/*initctl*; \
rm -f /lib/systemd/system/basic.target.wants/*;\
rm -f /lib/systemd/system/anaconda.target.wants/*;
RUN yum -y install net-tools openssh-server
RUN echo "PermitRootLogin yes" >> /etc/ssh/sshd_config
RUN yum update -y \
&& yum install -y dejavu-sans-fonts sudo wget \
&& wget -q -O /tmp/ffplayout-0.18.3-1.x86_64.rpm "https://github.com/ffplayout/ffplayout/releases/download/v0.18.3/ffplayout-0.18.3-1.x86_64.rpm" \
&& yum install -y /tmp/ffplayout-0.18.3-1.x86_64.rpm \
&& yum clean all \
&& echo 'Docker!' | passwd --stdin root \
&& rm /tmp/ffplayout-0.18.3-1.x86_64.rpm \
&& mkdir -p /home/ffpu && chown -R ffpu: /home/ffpu \
&& systemctl enable ffplayout \
&& systemctl enable ffpapi
EXPOSE 8787
RUN echo "/usr/local/lib" >> /etc/ld.so.conf.d/nvidia.conf
RUN echo "/usr/local/cuda/compat/" >> /etc/ld.so.conf.d/nvidia.conf
VOLUME [ "/sys/fs/cgroup", "/tmp", "/run", "/run/lock", "/etc/ffplayout", "/usr/share/ffplayout" ,"/var/lib/ffplayout" ]
CMD ["/usr/sbin/init"]

View File

@ -1,227 +0,0 @@
FROM nvidia/cuda:12.5.0-runtime-rockylinux9
ARG FFPLAYOUT_VERSION=0.24.0-rc3
ARG SHARED_STORAGE=false
ENV DB=/db
ENV SHARED_STORAGE=${SHARED_STORAGE}
ENV EXTRA_CFLAGS=-march=generic \
LOCALBUILDDIR=/tmp/build \
LOCALDESTDIR=/tmp/local \
PKG_CONFIG="pkg-config --static" \
PKG_CONFIG_PATH="/usr/lib64/pkgconfig/:/tmp/local/lib/pkgconfig" \
CPPFLAGS="-I/tmp/local/include -O3 -fno-strict-overflow -fstack-protector-all -fPIC" \
CFLAGS="-I/tmp/local/include -O3 -fno-strict-overflow -fstack-protector-all -fPIC" \
CXXFLAGS="-I/tmp/local/include -O2 -fPIC" \
LDFLAGS="-L/tmp/local/lib -pipe -Wl,-z,relro,-z,now -static" \
CC=clang
RUN dnf clean all -y && \
dnf makecache --refresh && \
dnf install -y epel-release && \
dnf config-manager --set-enabled crb
RUN dnf install -y which sqlite libstdc++-static libtool autoconf clang \
cmake ninja-build cargo ragel meson git pkgconfig bzip2 \
python3-devel gperf perl glibc-static binutils-devel \
nasm rsync wget
WORKDIR /tmp
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "zlib-1.3.1.tar.gz" "https://zlib.net/zlib-1.3.1.tar.gz" && \
tar xf "zlib-1.3.1.tar.gz" && \
cd "zlib-1.3.1" && \
./configure --prefix="$LOCALDESTDIR" --static && \
make -j $(nproc) && \
make install
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "openssl-1.1.1u.tar.gz" "https://www.openssl.org/source/openssl-1.1.1u.tar.gz" && \
tar xf "openssl-1.1.1u.tar.gz" && \
cd "openssl-1.1.1u" && \
./Configure --prefix=$LOCALDESTDIR --openssldir=$LOCALDESTDIR linux-x86_64 --libdir="$LOCALDESTDIR/lib" no-shared enable-camellia enable-idea enable-mdc2 enable-rfc3779 -static-libstdc++ -static-libgcc && \
make depend all && \
make install_sw
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "bzip2-1.0.8.tar.gz" "https://sourceware.org/pub/bzip2/bzip2-1.0.8.tar.gz" && \
tar xf "bzip2-1.0.8.tar.gz" && \
cd "bzip2-1.0.8" && \
make install PREFIX="$LOCALDESTDIR"
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "libpng-1.6.40.tar.gz" "http://prdownloads.sourceforge.net/libpng/libpng-1.6.40.tar.gz" && \
tar xf "libpng-1.6.40.tar.gz" && \
cd "libpng-1.6.40" && \
./configure --prefix="$LOCALDESTDIR" --disable-shared && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://github.com/fribidi/fribidi.git" && cd fribidi && \
./autogen.sh && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
make -j $(nproc) 2>/dev/null || true && \
make install
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "expat-2.5.0.tar.bz2" "https://github.com/libexpat/libexpat/releases/download/R_2_5_0/expat-2.5.0.tar.bz2" && \
tar xf "expat-2.5.0.tar.bz2" && \
cd "expat-2.5.0" && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no --without-docbook && \
make -j $(nproc) && \
make install
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "freetype-2.13.1.tar.gz" "https://sourceforge.net/projects/freetype/files/freetype2/2.13.1/freetype-2.13.1.tar.gz" && \
tar xf "freetype-2.13.1.tar.gz" && \
cd "freetype-2.13.1" && \
./configure --prefix="$LOCALDESTDIR" --disable-shared --with-harfbuzz=no && \
make -j $(nproc) && \
make install
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "fontconfig-2.14.2.tar.gz" "https://www.freedesktop.org/software/fontconfig/release/fontconfig-2.14.2.tar.gz" && \
tar xf "fontconfig-2.14.2.tar.gz" && \
cd "fontconfig-2.14.2" && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
make -j $(nproc) && \
make install && \
cp fontconfig.pc "$LOCALDESTDIR/lib/pkgconfig/"
RUN git clone --depth 1 "https://github.com/harfbuzz/harfbuzz.git" && cd harfbuzz && \
mkdir build && cd build && \
meson setup -Denable_tools=false --default-library=static .. --prefix "$LOCALDESTDIR" --libdir="$LOCALDESTDIR/lib" && \
ninja && \
ninja install
RUN git clone --depth 1 "https://github.com/zeromq/libzmq.git" && cd libzmq && \
./autogen.sh && \
./configure --prefix="$LOCALDESTDIR" --enable-static --disable-shared && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://github.com/libass/libass.git" && cd libass && \
./autogen.sh && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no --disable-harfbuzz && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://github.com/mstorsjo/fdk-aac" && cd fdk-aac && \
./autogen.sh && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
make -j $(nproc) && \
make install
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "lame-3.100.tar.gz" "https://downloads.sourceforge.net/project/lame/lame/3.100/lame-3.100.tar.gz" && \
tar xf "lame-3.100.tar.gz" && \
cd "lame-3.100" && \
./configure --prefix="$LOCALDESTDIR" --enable-expopt=full --enable-shared=no && \
make -j $(nproc) && \
make install
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "opus-1.4.tar.gz" "https://ftp.osuosl.org/pub/xiph/releases/opus/opus-1.4.tar.gz" && \
tar xf "opus-1.4.tar.gz" && \
cd "opus-1.4" && \
./configure --prefix="$LOCALDESTDIR" --enable-shared=no --enable-static --disable-doc && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://github.com/Haivision/srt.git" && cd srt && \
mkdir build && \
cd build && \
cmake .. -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DOPENSSL_USE_STATIC_LIBS=ON -DUSE_STATIC_LIBSTDCXX:BOOLEAN=ON -DENABLE_CXX11:BOOLEAN=ON -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" -DENABLE_APPS=0 -DENABLE_EXAMPLES=0 && \
make -j $(nproc) && \
make install
RUN git clone "https://github.com/webmproject/libvpx.git" && cd libvpx && \
./configure --prefix="$LOCALDESTDIR" --as=nasm --disable-shared --enable-static --disable-unit-tests --disable-docs --enable-postproc --enable-vp9-postproc --enable-runtime-cpu-detect && \
make -j $(nproc) && \
make install
RUN git clone "https://code.videolan.org/videolan/x264" && cd x264 && \
./configure --prefix="$LOCALDESTDIR" --enable-static && \
make -j $(nproc) && \
make install
RUN git clone "https://bitbucket.org/multicoreware/x265_git.git" && cd x265_git/build && \
cmake ../source -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DCMAKE_CXX_FLAGS_RELEASE:STRING="-O3 -DNDEBUG $CXXFLAGS" && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://gitlab.com/AOMediaCodec/SVT-AV1.git" && cd SVT-AV1/Build && \
cmake .. -G"Unix Makefiles" -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" && \
make -j $(nproc) && \
make install
RUN git clone --depth 1 "https://code.videolan.org/videolan/dav1d.git" && cd dav1d && \
mkdir build && cd build && \
meson setup -Denable_tools=false -Denable_tests=false --default-library=static .. --prefix "$LOCALDESTDIR" --libdir="$LOCALDESTDIR/lib" && \
ninja && \
ninja install
RUN git clone --depth 1 https://git.videolan.org/git/ffmpeg/nv-codec-headers && cd nv-codec-headers && \
make install PREFIX="$LOCALDESTDIR"
RUN git clone --depth 1 https://git.ffmpeg.org/ffmpeg.git && cd ffmpeg && \
./configure \
--pkg-config-flags=--static \
--extra-cflags="-fopenmp -DZMG_STATIC" \
--extra-ldflags="-fopenmp -Wl,--copy-dt-needed-entries -Wl,--allow-multiple-definition" \
--enable-runtime-cpudetect \
--prefix=/usr/local \
--disable-debug \
--disable-doc \
--disable-ffplay \
--disable-shared \
--enable-gpl \
--enable-version3 \
--enable-nonfree \
--enable-small \
--enable-static \
--enable-libass \
--enable-fontconfig \
--enable-libfdk-aac \
--enable-libfribidi \
--enable-libfreetype \
--enable-libharfbuzz \
--enable-libmp3lame \
--enable-libopus \
--enable-libsrt \
--enable-libvpx \
--enable-libx264 \
--enable-libx265 \
--enable-libzmq \
--enable-nonfree \
--enable-openssl \
--enable-libsvtav1 \
--enable-libdav1d \
--enable-nvenc && \
make -j $(nproc) && \
make install
RUN strip /usr/local/bin/ffmpeg /usr/local/bin/ffprobe
WORKDIR /
COPY README.md ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.* /tmp/
COPY <<-EOT /run.sh
#!/bin/sh
if [ ! -f /db/ffplayout.db ]; then
ffplayout -i -u admin -p admin -m contact@example.com --storage "/tv-media" --playlists "/playlists" --public "/public" --logs "/logging" --mail-smtp "mail.example.org" --mail-user "admin@example.org" --mail-password "" --mail-starttls
fi
/usr/bin/ffplayout -l "0.0.0.0:8787"
EOT
RUN chmod +x /run.sh
RUN [[ -f "/tmp/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" ]] || \
wget -q "https://github.com/ffplayout/ffplayout/releases/download/v${FFPLAYOUT_VERSION}/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" -P /tmp/ && \
cd /tmp && \
tar xf "ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" && \
cp ffplayout /usr/bin/ && \
mkdir -p /usr/share/ffplayout/ && \
cp assets/dummy.vtt assets/logo.png assets/DejaVuSans.ttf assets/FONT_LICENSE.txt /usr/share/ffplayout/ && \
rm -rf /tmp/* && \
mkdir ${DB}
EXPOSE 8787
CMD ["/run.sh"]

View File

@ -21,10 +21,6 @@ Using live ingest to inject a live stream.
The different output modes.
### **[Playlist Generation](/docs/playlist_gen.md)**
Generate playlists based on template.
### **[Multi Audio Tracks](/docs/multi_audio.md)**
Output multiple audio tracks.
@ -44,9 +40,3 @@ Use of remote sources, like https://example.org/video.mp4
### **[ffplayout API](/docs/api.md)**
Control the engine, playlist and config with a ~REST API
### **[Stream Copy](/docs/stream_copy.md)**
Copy audio and or video stream
### **[Advanced Settings](/docs/advanced_settings.md)**

View File

@ -1,94 +0,0 @@
## Advanced settings
With **advanced settings** you can control all ffmpeg inputs/decoder/output and filters.
> **_Note:_** Changing these settings is for advanced users only! There will be no support or guarantee that it will work and be stable after changing them!
To change these settings, you need knowledge about hardware encoding with ffmpeg. Good starting points are:
- [HWAccelIntro](https://trac.ffmpeg.org/wiki/HWAccelIntro)
- [VAAPI](https://trac.ffmpeg.org/wiki/Hardware/VAAPI)
- [QuickSync](https://trac.ffmpeg.org/wiki/Hardware/QuickSync)
### Example config
##### Here is an example with Intel QuickSync:
```YAML
help: Changing these settings is for advanced users only! There will be no support or guarantee that ffplayout will be stable after changing them.
decoder:
input_param: -hwaccel qsv -init_hw_device qsv=hw -filter_hw_device hw -hwaccel_output_format qsv
# output_param get also applied to ingest instance.
output_param: -c:v mpeg2_qsv -g 1 -b:v 50000k -minrate 50000k -maxrate 50000k -bufsize 25000k -c:a s302m -strict -2 -sample_fmt s16 -ar 48000 -ac 2
filters:
deinterlace: deinterlace_qsv
pad_scale_w: scale_qsv={}:-1
pad_scale_h: scale_qsv=-1:{}
pad_video: 'null' # pad=max(iw\\,ih*({0}/{1})):ow/({0}/{1}):(ow-iw)/2:(oh-ih)/2
fps: vpp_qsv=framerate=25
scale: scale_qsv={}:{}
set_dar: 'null' # setdar=dar={}
fade_in: 'null' # fade=in:st=0:d=0.5
fade_out: 'null' # fade=out:st={}:d=1.0
overlay_logo_scale: 'null'
overlay_logo_fade_in: fade=in:st=0:d=1.0 # fade=in:st=0:d=1.0:alpha=1
overlay_logo_fade_out: fade=out:st={}:d=1.0 # fade=out:st={}:d=1.0:alpha=1
overlay_logo: hwupload=extra_hw_frames=64,format=qsv[l];[v][l]overlay_qsv={}:shortest=1
tpad: 'null' # tpad=stop_mode=add:stop_duration={}
drawtext_from_file: hwdownload,format=nv12,drawtext=text='{}':{}{} # drawtext=text='{}':{}{}
drawtext_from_zmq: hwdownload,format=nv12,zmq=b=tcp\\://'{}',drawtext@dyntext={} # zmq=b=tcp\\\\://'{}',drawtext@dyntext={}
aevalsrc: # aevalsrc=0:channel_layout=stereo:duration={}:sample_rate=48000
afade_in: # afade=in:st=0:d=0.5
afade_out: # afade=out:st={}:d=1.0
apad: # apad=whole_dur={}
volume: # volume={}
split: # split={}{}
encoder:
# use `-hwaccel vulkan` when output mode is desktop
input_param: -hwaccel qsv -init_hw_device qsv=hw -filter_hw_device hw -hwaccel_output_format qsv
ingest:
input_param: -hwaccel qsv -init_hw_device qsv=hw -filter_hw_device hw -hwaccel_output_format qsv
```
##### Here is an example with Nvidia HW processing
```YAML
help: Changing these settings is for advanced users only! There will be no support or guarantee that it will be stable after changing them.
decoder:
input_param: -thread_queue_size 1024 -hwaccel_device 0 -hwaccel cuvid -hwaccel_output_format cuda
# output_param get also applied to ingest instance.
output_param: -c:v h264_nvenc -preset p2 -tune ll -b:v 50000k -minrate 50000k -maxrate 50000k -bufsize 25000k -c:a s302m -strict -2 -sample_fmt s16 -ar 48000 -ac 2
filters:
deinterlace: 'null'
pad_scale_w: 'null' # scale={}:-1
pad_scale_h: 'null' # scale=-1:{}
pad_video: 'null' # pad=max(iw\\,ih*({0}/{1})):ow/({0}/{1}):(ow-iw)/2:(oh-ih)/2
fps: 'null' # fps={}
scale: scale_cuda={}:{}:interp_algo=lanczos:force_original_aspect_ratio=decrease # scale={}:{}
set_dar: 'null' # setdar=dar={}
fade_in: hwdownload,format=nv12,fade=in:st=0:d=0.5,format=nv12,hwupload_cuda # fade=in:st=0:d=0.5
fade_out: hwdownload,format=nv12,fade=out:st={}:d=1.0,format=nv12,hwupload_cuda # fade=out:st={}:d=1.0
overlay_logo_scale: 'null' # scale={}
overlay_logo_fade_in: fade=in:st=0:d=1.0 # fade=in:st=0:d=1.0:alpha=1
overlay_logo_fade_out: fade=out:st={}:d=1.0 # fade=out:st={}:d=1.0:alpha=1
overlay_logo: format=nv12,hwupload_cuda[l];[v][l]overlay_cuda=W-w-12:12:shortest=1,hwdownload,format=nv12
tpad: # tpad=stop_mode=add:stop_duration={}
drawtext_from_file: # drawtext=text='{}':{}{}
drawtext_from_zmq: # zmq=b=tcp\\\\://'{}',drawtext@dyntext={}
aevalsrc: # aevalsrc=0:channel_layout=stereo:duration={}:sample_rate=48000
afade_in: # afade=in:st=0:d=0.5
afade_out: # afade=out:st={}:d=1.0
apad: # apad=whole_dur={}
volume: # volume={}
split: # split={}{}
encoder:
input_param:
ingest:
input_param: -thread_queue_size 1024 -hwaccel_device 0 -hwaccel cuvid -hwaccel_output_format cuda
```
---
**At the moment this function is _experimental_. If you think you found a bug: check the full decoder/encoder/ingest command with ffmpeg in a terminal. If the command works there, you can open a bug report issue.**
Please don't open issues for general command line help!
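A minimal decoder smoke test for the QSV example above, assuming a QSV-capable device and a local `input.mp4`; the frames are downloaded from the GPU and discarded:
```BASH
ffmpeg -hide_banner -hwaccel qsv -init_hw_device qsv=hw -filter_hw_device hw \
    -hwaccel_output_format qsv -i input.mp4 \
    -vf hwdownload,format=nv12 -f null -
```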

View File

@ -3,7 +3,7 @@
Run the API thru the systemd service, or like:
```BASH
ffplayout -l 127.0.0.1:8787
ffpapi -l 127.0.0.1:8787
```
For all endpoints a (Bearer) authentication is required.\
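The token comes from the login endpoint; a minimal sketch, assuming the default `/auth/login/` route and the admin user created during installation:
```BASH
curl -X POST 'http://127.0.0.1:8787/auth/login/' -H 'Content-Type: application/json' \
-d '{ "username": "admin", "password": "admin" }'
```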
@ -38,19 +38,6 @@ curl -X GET 'http://127.0.0.1:8787/api/user' -H 'Content-Type: application/json'
-H 'Authorization: Bearer <TOKEN>'
```
**Get User by ID**
```BASH
curl -X GET 'http://127.0.0.1:8787/api/user/2' -H 'Content-Type: application/json' \
-H 'Authorization: Bearer <TOKEN>'
```
```BASH
curl -X GET 'http://127.0.0.1:8787/api/users' -H 'Content-Type: application/json' \
-H 'Authorization: Bearer <TOKEN>'
```
**Update current User**
```BASH
@ -66,13 +53,7 @@ curl -X POST 'http://127.0.0.1:8787/api/user/' -H 'Content-Type: application/jso
-H 'Authorization: Bearer <TOKEN>'
```
```BASH
curl -X GET 'http://127.0.0.1:8787/api/user/2' -H 'Content-Type: application/json' \
-H 'Authorization: Bearer <TOKEN>'
```
#### Settings
#### ffpapi Settings
**Get Settings from Channel**
@ -87,7 +68,9 @@ curl -X GET http://127.0.0.1:8787/api/channel/1 -H "Authorization: Bearer <TOKEN
"id": 1,
"name": "Channel 1",
"preview_url": "http://localhost/live/preview.m3u8",
"config_path": "/etc/ffplayout/ffplayout.yml",
"extra_extensions": "jpg,jpeg,png",
"service": "ffplayout.service",
"utc_offset": "+120"
}
```
@ -102,7 +85,7 @@ curl -X GET http://127.0.0.1:8787/api/channels -H "Authorization: Bearer <TOKEN>
```BASH
curl -X PATCH http://127.0.0.1:8787/api/channel/1 -H "Content-Type: application/json" \
-d '{ "id": 1, "name": "Channel 1", "preview_url": "http://localhost/live/stream.m3u8", "extra_extensions": "jpg,jpeg,png"}' \
-d '{ "id": 1, "name": "Channel 1", "preview_url": "http://localhost/live/stream.m3u8", "config_path": "/etc/ffplayout/ffplayout.yml", "extra_extensions": "jpg,jpeg,png"}' \
-H "Authorization: Bearer <TOKEN>"
```
@ -110,7 +93,7 @@ curl -X PATCH http://127.0.0.1:8787/api/channel/1 -H "Content-Type: application/
```BASH
curl -X POST http://127.0.0.1:8787/api/channel/ -H "Content-Type: application/json" \
-d '{ "name": "Channel 2", "preview_url": "http://localhost/live/channel2.m3u8", "extra_extensions": "jpg,jpeg,png" }' \
-d '{ "name": "Channel 2", "preview_url": "http://localhost/live/channel2.m3u8", "config_path": "/etc/ffplayout/channel2.yml", "extra_extensions": "jpg,jpeg,png", "service": "ffplayout@channel2.service" }' \
-H "Authorization: Bearer <TOKEN>"
```
@ -122,28 +105,13 @@ curl -X DELETE http://127.0.0.1:8787/api/channel/2 -H "Authorization: Bearer <TO
#### ffplayout Config
**Get Advanced Config**
```BASH
curl -X GET http://127.0.0.1:8787/api/playout/advanced/1 -H 'Authorization: Bearer <TOKEN>'
```
Response is a JSON object
**Update Advanced Config**
```BASH
curl -X PUT http://127.0.0.1:8787/api/playout/advanced/1 -H "Content-Type: application/json" \
-d { <CONFIG DATA> } -H 'Authorization: Bearer <TOKEN>'
```
**Get Config**
```BASH
curl -X GET http://127.0.0.1:8787/api/playout/config/1 -H 'Authorization: Bearer <TOKEN>'
```
Response is a JSON object
Response is a JSON object from the ffplayout.yml
**Update Config**
@ -174,7 +142,7 @@ curl -X PUT http://127.0.0.1:8787/api/presets/1 -H 'Content-Type: application/js
**Add new Preset**
```BASH
curl -X POST http://127.0.0.1:8787/api/presets/1/ -H 'Content-Type: application/json' \
curl -X POST http://127.0.0.1:8787/api/presets/ -H 'Content-Type: application/json' \
-d '{ "name": "<PRESET NAME>", "text": "TEXT>", "x": "<X>", "y": "<Y>", "fontsize": 24, "line_spacing": 4, "fontcolor": "#ffffff", "box": 1, "boxcolor": "#000000", "boxborderw": 4, "alpha": 1.0, "channel_id": 1 }' \
-H 'Authorization: Bearer <TOKEN>'
```
@ -223,19 +191,38 @@ curl -X GET http://127.0.0.1:8787/api/control/1/media/current
**Response:**
```JSON
{
    "media": {
        "category": "",
        "duration": 154.2,
        "out": 154.2,
        "in": 0.0,
        "seek": 0.0,
        "source": "/opt/tv-media/clip.mp4"
    },
    "index": 39,
    "ingest": false,
    "mode": "playlist",
    "played": 67.808
}
{
    "jsonrpc": "2.0",
    "result": {
        "current_media": {
            "category": "",
            "duration": 154.2,
            "out": 154.2,
            "in": 0.0,
            "seek": 0.0,
            "source": "/opt/tv-media/clip.mp4"
        },
        "index": 39,
        "play_mode": "playlist",
        "played_sec": 67.80771999300123,
        "remaining_sec": 86.39228000699876,
        "start_sec": 24713.631999999998,
        "start_time": "06:51:53.631"
    },
    "id": 1
}
```
**Get next Clip**
```BASH
curl -X GET http://127.0.0.1:8787/api/control/1/media/next/ -H 'Authorization: Bearer <TOKEN>'
```
**Get last Clip**
```BASH
curl -X GET http://127.0.0.1:8787/api/control/1/media/last/
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
```
#### ffplayout Process Control
@ -266,7 +253,7 @@ curl -X GET http://127.0.0.1:8787/api/playlist/1?date=2022-06-20
```BASH
curl -X POST http://127.0.0.1:8787/api/playlist/1/
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
--data "{<JSON playlist data>}"
-- data "{<JSON playlist data>}"
```
**Generate Playlist**
@ -274,18 +261,8 @@ curl -X POST http://127.0.0.1:8787/api/playlist/1/
A new playlist will be generated and returned in the response.
```BASH
curl -X POST http://127.0.0.1:8787/api/playlist/1/generate/2022-06-20
curl -X GET http://127.0.0.1:8787/api/playlist/1/generate/2022-06-20
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
/// --data '{ "paths": [<list of paths>] }' # <- data is optional
```
Or with template:
```BASH
curl -X POST http://127.0.0.1:8787/api/playlist/1/generate/2023-09-05
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
--data '{"template": {"sources": [\
{"start": "00:00:00", "duration": "10:00:00", "shuffle": true, "paths": ["path/1", "path/2"]}, \
{"start": "10:00:00", "duration": "14:00:00", "shuffle": false, "paths": ["path/3", "path/4"]}]}}'
```
**Delete Playlist**
@ -297,10 +274,10 @@ curl -X DELETE http://127.0.0.1:8787/api/playlist/1/2022-06-20
### Log file
**Read Log File**
**Read Log Life**
```BASH
curl -X GET http://127.0.0.1:8787/api/log/1?date=2022-06-20
curl -X GET http://127.0.0.1:8787/api/log/1
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
```
@ -341,22 +318,6 @@ curl -X PUT http://127.0.0.1:8787/api/file/1/upload/ -H 'Authorization: Bearer <
-F "file=@file.mp4"
```
**Get File**
Can be used for previewing video files
```BASH
curl -X GET http://127.0.0.1:8787/file/1/path/to/file.mp4
```
**Get Public**
Can be used for HLS Playlist and other static files in public folder
```BASH
curl -X GET http://127.0.0.1:8787/live/stream.m3u8
```
**Import playlist**
Import text/m3u file and convert it to a playlist
@ -390,12 +351,3 @@ curl -X GET http://127.0.0.1:8787/api/program/1/?start_after=2022-11-13T10:00:00
-H 'Authorization: Bearer <TOKEN>'
```
### System Statistics
Get statistics about CPU, Ram, Disk, etc. usage.
```BASH
curl -X GET http://127.0.0.1:8787/api/system/1
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
```

View File

@ -1,23 +0,0 @@
## Closed Captions
#### Note:
**This is only an _experimental feature_. Please be aware that bugs and unexpected behavior may occur. To utilize this feature, a version after 7.1 of FFmpeg is required. Importantly, there is currently no official support for this functionality.**
### Usage
**ffplayout** can handle closed captions in WebVTT format for HLS streaming.
The captions can be embedded in the file, such as in a [Matroska](https://www.matroska.org/technical/subtitles.html) file, or they can be a separate *.vtt file that shares the same filename as the video file. In either case, the processing option **vtt_enable** must be enabled, and the path to the **vtt_dummy** file must exist.
To encode the closed captions, the **hls** mode needs to be enabled, and specific output parameters must be provided. Here's an example:
```
-c:v libx264 -crf 23 -x264-params keyint=50:min-keyint=25:scenecut=-1 \
-maxrate 1300k -bufsize 2600k -preset faster -tune zerolatency \
-profile:v Main -level 3.1 -c:a aac -ar 44100 -b:a 128k -flags +cgop \
-muxpreload 0 -muxdelay 0 -f hls -hls_time 6 -hls_list_size 600 \
-hls_flags append_list+delete_segments+omit_endlist \
-var_stream_map v:0,a:0,s:0,sgroup:subs,sname:English,language:en-US,default:YES \
-master_pl_name master.m3u8 \
-hls_segment_filename \
live/stream-%d.ts live/stream.m3u8
```
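For the sidecar variant, the naming convention looks like this (hypothetical paths; the `.vtt` file must share the video's basename):
```BASH
ls /tv-media/show/
# clip.mp4  clip.vtt  <- both get picked up when vtt_enable is set
```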

View File

@ -1,10 +1,10 @@
## Custom filter
ffplayout allows the definition of a custom filter string. For this, the parameter **custom_filter** is available in the playout configuration under **processing**. The playlist can also contain a **custom_filter** parameter for each clip, with the same usage.
ffplayout allows it to define a custom filter string. For that is the parameter **custom_filter** in the **ffplayout.yml** config file under **processing**. The playlist can also contain a **custom_filter** parameter for every clip, with the same usage.
The filter outputs should end with `[c_v_out]` for video filters and `[c_a_out]` for audio filters. The filters will be applied to every clip and after the filters that unify the clips.
The filter outputs should end with `[c_v_out]` for video filter, and `[c_a_out]` for audio filter. The filters will be apply on every clip and after the filters which unify the clips.
It is possible to apply only video filters, only audio filters, or both. For a better understanding, here are some examples:
It is possible to apply only video or audio filters, or both. For a better understanding here some examples:
#### Apply Gaussian blur and volume filter:
@ -24,18 +24,6 @@ custom_filter: "loudnorm=I=-18:TP=-1.5:LRA=11[c_a_out]"
custom_filter: "[v_in];movie=/path/to/lower_third.png:loop=0,scale=1024:576,setpts=N/(25*TB)[lower];[v_in][lower]overlay=0:0:shortest=1[c_v_out]"
```
#### Overlay current time:
```YAML
custom_filter: "drawtext=text='%{localtime\:%H\\\:%M\\\:%S}':fontcolor=white:fontsize=40:x=w-tw-20:y=20:box=1:boxcolor=red@0.7:boxborderw=10[c_v_out]"
```
#### Scrolling text with static background:
```YAML
custom_filter: "drawbox=x=0:y=in_h-(in_h/6):w=in_w:h=60:t=fill:color=#000000@0x73,drawtext=text='Hello World':x='ifnot(ld(1),st(1,t));if(lt(t,ld(1)+1),w+4,w-w/12*mod(t-ld(1),12*(w+tw)/w))':y='main_h-(main_h/6)+20':fontsize=24:fontcolor=#f2f2f2"
```
Pay attention to the filter prefix `[v_in];`; it is necessary to get the output from the regular filters.
#### Paint effect
@ -48,24 +36,14 @@ custom_filter: "edgedetect=mode=colormix:high=0[c_v_out]"
The **custom filter** from **config -> processing** and from **playlist** is applied in the _decoder_ instance on every file:
```mermaid
flowchart LR
subgraph fileloop["file loop"]
direction LR
Input --> dec
subgraph filter["start new on file change"]
direction LR
dec["decoder / filtering / custom filter"]
end
end
subgraph fileout["constant output"]
direction LR
enc["encoder / text overlay"]
end
dec -- PIPE --> enc
enc --> output
```
```
+-------------------------------------------------- +
| file loop |
| +-------------------------------------+ | PIPE +------------------------+
| input -> | decoder / filtering / custom filter |-------------| encoder / text overlay | -> output
| +-------------------------------------+ | +------------------------+
| start new on file change | constant output
+---------------------------------------------------+
```
#### When to use which
@ -84,6 +62,7 @@ custom_filter: "[v_in];movie=image_input.png:s=v,loop=loop=250.0:size=1:start=0,
And here is the explanation for each filter:
```PYTHON
# get input from video
[v_in];

View File

@ -2,6 +2,24 @@
For compiling, always use the newest Rust version; the best is to install it from [rustup](https://rustup.rs/).
### Cross Compile
For cross compiling on fedora linux, you need to install some extra packages:
- mingw compiler:
```
dnf install mingw64-filesystem mingw64-binutils mingw64-gcc{,-c++} mingw64-crt mingw64-headers mingw64-pkg-config mingw64-hamlib mingw64-libpng mingw64-libusbx mingw64-portaudio mingw64-fltk mingw64-libgnurx mingw64-gettext mingw64-winpthreads-static intltool
```
- rust tools:
```
rustup target add x86_64-pc-windows-gnu
```
[Cross](https://github.com/cross-rs/cross#dependencies) could be an option too.
To build, run: `cargo build --release --target=x86_64-pc-windows-gnu`
### Static Linking
Running `cargo build` results in a binary that depends on **libc.so**. But you can also compile the binary fully static:
@ -15,100 +33,110 @@ Compile with: `cargo build --release --target=x86_64-unknown-linux-musl`.
This release should run on any Linux distro.
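To confirm the result is really static, inspect the binary (the path assumes the default release layout):
```BASH
# "not a dynamic executable" / "statically linked" confirms static linking
ldd target/x86_64-unknown-linux-musl/release/ffplayout
file target/x86_64-unknown-linux-musl/release/ffplayout
```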
**Note: You can also create a static version with Cross Toolchain. For this, follow the next steps.**
### Cross Compile
For cross compiling install docker or podman and latest [cross-rs](https://github.com/cross-rs/cross):
```
cargo install cross --git https://github.com/cross-rs/cross
```
To build for windows, run: `cross build --release --target x86_64-pc-windows-gnu`\
To build for linux aarch64: `cross build --release --target aarch64-unknown-linux-gnu`
Etc.
### Compile from Linux for macOS
Follow the [cross-toolchains](https://github.com/cross-rs/cross-toolchains) instructions to add macOS support to cross.
I created my image with:
```Bash
cargo build-docker-image x86_64-apple-darwin-cross \
    --build-arg 'MACOS_SDK_URL=https://github.com/joseluisq/macosx-sdks/releases/download/12.3/MacOSX12.3.sdk.tar.xz'
```
Build then with:
```Bash
cross build --release --target aarch64-apple-darwin
```
Alternatively, follow this guide: [rust-cross-compile-linux-to-macos](https://wapl.es/rust/2019/02/17/rust-cross-compile-linux-to-macos.html), or set up [osxcross](https://github.com/tpoechtrager/osxcross) correctly.
Add the toolchains:
```Bash
# for arm64
rustup target add aarch64-apple-darwin
# for x86_64
rustup target add x86_64-apple-darwin
```
Add linker and ar settings to `~/.cargo/config`:
```Bash
[target.x86_64-apple-darwin]
linker = "x86_64-apple-darwin20.4-clang"
ar = "x86_64-apple-darwin20.4-ar"
[target.aarch64-apple-darwin]
linker = "aarch64-apple-darwin20.4-clang"
ar = "aarch64-apple-darwin20.4-ar"
```
Add **osxcross/target/bin** to your **PATH** and run cargo with:
```Bash
# for arm64
CC="aarch64-apple-darwin20.4-clang -arch arm64e" cargo build --release --target=aarch64-apple-darwin
# for x86_64
CC="o64-clang" cargo build --release --target=x86_64-apple-darwin
```
### Compile for armv7 Linux
Add toolchain:
```Bash
rustup target add armv7-unknown-linux-gnueabihf
```
Add cross compiler:
```Bash
dnf copr enable lantw44/arm-linux-gnueabihf-toolchain
dnf install arm-linux-gnueabihf-{binutils,gcc,glibc}
```
Add target to `~/.cargo/config`:
```Bash
[target.armv7-unknown-linux-gnueabihf]
linker = "arm-linux-gnueabihf-gcc"
rustflags = [ "-C", "target-feature=+crt-static", "-C", "link-arg=-lgcc" ]
```
### Compile for aarch64 Linux
Add toolchain:
```Bash
rustup target add aarch64-unknown-linux-gnu
```
Add cross compiler:
```Bash
dnf copr enable lantw44/aarch64-linux-gnu-toolchain
dnf install aarch64-linux-gnu-{binutils,gcc,glibc}
```
Add target to `~/.cargo/config`:
```Bash
[target.aarch64-unknown-linux-gnu]
linker = "aarch64-linux-gnu-gcc"
rustflags = [ "-C", "target-feature=+crt-static", "-C", "link-arg=-lgcc" ]
```
### Create debian DEB and RHEL RPM packages
install:
- `cargo install cargo-deb`
- `cargo install cargo-generate-rpm`
Compile to your target system with cargo or cross, and run:
And run with:
```Bash
# for debian based systems:
cargo deb --no-build --target=x86_64-unknown-linux-musl
cargo deb --target=x86_64-unknown-linux-musl
# for armhf
cargo deb --no-build --target=armv7-unknown-linux-gnueabihf --variant=armhf -p ffplayout --manifest-path=ffplayout-engine/Cargo.toml
cargo deb --target=armv7-unknown-linux-gnueabihf --variant=armhf -p ffplayout --manifest-path=ffplayout-engine/Cargo.toml
# for arm64
cargo deb --no-build --target=aarch64-unknown-linux-gnu --variant=arm64 -p ffplayout --manifest-path=ffplayout-engine/Cargo.toml
cargo deb --target=aarch64-unknown-linux-gnu --variant=arm64 -p ffplayout --manifest-path=ffplayout-engine/Cargo.toml
# for rhel based systems:
cargo generate-rpm --target=x86_64-unknown-linux-musl
```
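Installing the generated packages locally could then look like this (paths assume the cargo-deb/cargo-generate-rpm defaults; file names differ by version and architecture):
```BASH
apt install ./target/x86_64-unknown-linux-musl/debian/ffplayout_*_amd64.deb
# or on RHEL based systems:
dnf install ./target/x86_64-unknown-linux-musl/generate-rpm/ffplayout-*.x86_64.rpm
```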
## Generate types for Frontend
The frontend uses TypeScript; to generate types for the Rust structs, run `cargo test`.
The generated types are then in [types folder](/frontend/types).
## Setup Frontend
Make sure to install the dependencies:
```bash
# yarn
yarn install
# npm
npm install
# pnpm
pnpm install --shamefully-hoist
```
## Development Server
Start the development server on http://localhost:3000
```bash
npm run dev
```
## Production
Build the application for production:
```bash
npm run build
```
Locally preview production build:
```bash
npm run preview
```
Check out the [deployment documentation](https://nuxt.com/docs/getting-started/deployment) for more information.

View File

@ -1,9 +1,10 @@
### Folder Mode
ffplayout can play files from a folder; no playlists are required for this mode. This folder is monitored for changes, and when new files are added or deleted, they are registered and updated accordingly.
ffplayout can play files from a folder, no playlists are required for this mode. This folder is monitored for changes, and when new files are added or deleted, this is registered and updated accordingly.
You just need to set `mode: folder` in the config under `processing:`, and under `storage:`, you have to specify the correct folder and the file extensions you want to scan for.
You just have to set `mode: folder` in the config under `processing:` and under `storage:` you have to enter the correct folder and the file extensions you want to scan for.
Additionally, there is a **shuffle** mode. If this is activated, the files will be played randomly.
Additionally there is a **shuffle** mode, if this is activated, the files will be played randomly.
If shuffle mode is off, the clips will be played in sorted order.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -1,12 +1,12 @@
In some situations, the application may close unexpectedly in conjunction with Live Ingest.
Here is an example in combination with SRS:
Here is an example, in combination with SRS:
When a live stream is sent, it is forwarded to ffplayout, which then switches the TV program to the live stream.
Problems can occur if the internet connection for the live stream is not stable. In such cases, timeouts can occur, SRS breaks the connection to the playout, and the entire ffplayout process has to be restarted. The default timeout is 5000ms, or 5 seconds.
Problems now occur if the internet connection for the live stream is not stable. Then timeouts can occur, SRS breaks the connection to the playout and the whole ffplayout process has to be restarted. The default timeout is 5000ms, i.e. 5 seconds.
The timeout can be changed in SRS in the respective vhosts with:
The timeout can be heard in SRS in the respective vhosts with:
```NGINX
publish {

View File

@ -1,29 +1,38 @@
### Install ffplayout
**Note:** This is the official and supported way.
ffplayout provides ***.deb** amd ***.rpm** packages, which makes it more easy to install and use, but there is still some steps to do.
ffplayout provides ***.deb** and ***.rpm** packages, which makes it easier to install and use, but there are still some steps to follow.
1. download the latest ffplayout from [release](https://github.com/ffplayout/ffplayout/releases/latest) page.
2. install it with `apt install /tmp/ffplayout_<VERSION>_amd64.deb`
3. install ffmpeg/ffprobe, or compile and copy it to **/usr/local/bin/**
4. activate systemd services:
- `systemctl enable ffplayout`
- `systemctl enable --now ffpapi`
5. add admin user to ffpapi:
- `ffpapi -a`
6. use a revers proxy for SSL, Port is **8787**.
7. login with your browser, address without proxy would be: **http://[IP ADDRESS]:8787**
1. Download the latest ffplayout from the [release](https://github.com/ffplayout/ffplayout/releases/latest) page and place the package in the **/tmp** folder
2. Install it with `apt install /tmp/ffplayout_<VERSION>_amd64.deb`
3. Install ffmpeg/ffprobe, or compile and copy them to **/usr/local/bin/**
4. Initialize the defaults and add a global admin user: `sudo -u ffpu ffplayout -i`
5. Use a reverse proxy for SSL; the port is **8787**
6. Log in with your browser. The address without a proxy would be: **http://[IP ADDRESS]:8787**
The default location for playlists and media files is **/var/lib/ffplayout/**. If you need to change it, the media storage folder needs a symlink to **/usr/share/ffplayout/public/**.
When you don't need the frontend and API, skip enabling the systemd service **ffpapi**.
When playlists are created and the ffplayout output is configured, you can start the process: `systemctl start ffplayout`, or click start in the frontend.
If you want to configure ffplayout from the terminal, you can edit **/etc/ffplayout/ffplayout.yml**.
### Manual Install
-----
**Note:** This is for advanced users only.
- Install ffmpeg/ffprobe, or compile and copy them to **/usr/local/bin/**
- Download the latest archive from the [release](https://github.com/ffplayout/ffplayout/releases/latest) page
- Copy the ffplayout binary to `/usr/bin/`
- Copy **assets/ffplayout.yml** to `/etc/ffplayout`
- Create the folder `/var/log/ffplayout`
- Create the system user **ffpu**
- Give ownership of `/etc/ffplayout` and `/var/log/ffplayout` to **ffpu**
- Copy **assets/ffplayout.service** to `/etc/systemd/system`
- Copy **assets/ffplayout.1.gz** to `/usr/share/man/man1/`
- Copy the **public** folder to `/usr/share/ffplayout/`
- Activate the service and run it: `systemctl enable --now ffplayout`
- Initialize the defaults and add a global admin user: `sudo -u ffpu ffplayout -i`
- install ffmpeg/ffprobe, or compile and copy it to **/usr/local/bin/**
- download the latest archive from [release](https://github.com/ffplayout/ffplayout/releases/latest) page
- copy the ffplayout and ffpapi binary to `/usr/bin/`
- copy **assets/ffplayout.yml** to `/etc/ffplayout`
- create folder `/var/log/ffplayout`
- create system user **ffpu**
- give ownership from `/etc/ffplayout` and `/var/log/ffplayout` to **ffpu**
- copy **assets/ffpapi.service**, **assets/ffplayout.service** and **assets/ffplayout@.service** to `/etc/systemd/system`
- copy **assets/11-ffplayout** to `/etc/sudoers.d/`
- copy **assets/ffpapi.1.gz** and **assets/ffplayout.1.gz** to `/usr/share/man/man1/`
- copy **public** folder to `/usr/share/ffplayout/`
- activate service and run it: `systemctl enable --now ffpapi ffplayout`
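Whichever install path you take, a quick sanity check could look like this (assuming the default port):
```BASH
systemctl --no-pager status ffpapi ffplayout
# the API should answer on port 8787
curl -I http://127.0.0.1:8787
```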

View File

@ -1,8 +1,8 @@
### Live Ingest
With live ingest, you have the possibility to switch from playlist or folder mode to a live stream.
With live ingest you have the possibility to switch from playlist, or folder mode to a live stream.
It works by creating an ffmpeg instance in _listen_ (_server_) mode. For example, when streaming over RTMP, you can set the ingest input parameters to:
It works in a way, that it create a ffmpeg instance in _listen_ (_server_) mode. For example when you stream over RTMP to it, you can set the ingest input parameters to:
```
-f live_flv -listen 1 -i rtmp://0.0.0.0:1936/live/my-secrete-streaming-key
@ -14,14 +14,14 @@ For SRT you could use:
-f mpegts -i 'srt://0.0.0.0:40077?mode=listener&passphrase=12345abcde'
```
Keep in mind that the ingest mode **can't** pull from a server; it can only act as its own server and listen for incoming streams.
Have in mind, that the ingest mode **can't** pull from a server, it only can act as its own server and listen for income.
When it detects an incoming stream, it will stop the currently playing content and switch to the live source. The output will not be interrupted, so you will have a continuous output stream.
When it notice a incoming stream, it will stop the current playing and continue the live source. The output will not interrupt, so you have a continuously output stream.
In rare cases, it may happen that, for a short moment after switching, the image freezes, but then it will continue. Also, a brief frame flicker might occur.
In rare cases it can happen, that for a short moment after switching the image freezes, but then it will continue. Also a short frame flickering can happen.
You should know that **ffmpeg, in its current version, has no authentication mechanism and simply listens to the protocol and port (no app and stream name).**
You need to know, that **ffmpeg in current version has no authentication mechanism and it just listen to the protocol and port (no app and stream name).**
ffplayout addresses this issue by monitoring the output from ffmpeg. When the input is **rtmp** and the app or stream name differs from the configuration, it stops the ingest process. So, in a way, we have some control over which streams are accepted and which are not.
ffplayout catches this problem with monitoring the output from ffmpeg. When the input is **rtmp** and the app or stream name differs to the config it stops the ingest process. So in a way we have a bit control, which stream we let come in and which not.
In theory, you can use any [protocol](https://ffmpeg.org/ffmpeg-protocols.html) from ffmpeg that supports a **listen** mode.
In theory you can use every [protocol](https://ffmpeg.org/ffmpeg-protocols.html) from ffmpeg which support a **listen** mode.
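As a quick test of the RTMP example above, you can push a local file into the listener (stream key taken from the sample input parameters; the encoder settings are only a sketch):
```BASH
ffmpeg -re -i test_clip.mp4 -c:v libx264 -preset veryfast -b:v 2500k \
-c:a aac -ar 44100 -f flv "rtmp://127.0.0.1:1936/live/my-secrete-streaming-key"
```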

View File

@ -2,15 +2,15 @@
**\* This is an experimental feature and more intended for advanced users. Use it with caution!**
With _ffplayout_, you can output streams with multiple audio tracks, with some limitations:
* Not all formats support multiple audio tracks. For example, _flv/rtmp_ doesn't support it.
* In your output parameters, you need to set the correct mapping.
With _ffplayout_ you can output streams with multiple audio tracks, with some limitations:
* Not all formats support multiple audio tracks. For example _flv/rtmp_ doesn't support it.
* In your output parameters you need to set the correct mapping.
ffmpeg filter usage and encoding parameters can become very complex, so it may happen that not every combination works out of the box.
ffmpeg filter usage and encoding parameters can become very complex, so it can happen that not every combination works out of the box.
To get a better idea of what works, you can examine [engine_cmd](../tests/src/engine_cmd.rs).
To get e better idea of what works, you can examine [engin_cmd](../tests/src/engine_cmd.rs).
If you are outputting a single video stream with multiple audio tracks, for example with the `srt://` protocol, you only need to set the correct `audio_tracks:` count in your config under `processing:`.
If you just output a single video stream with multiple audio tracks, let's say with `srt://` protocol, you only need to set in you config under `processing:` the correct `audio_tracks:` count.
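For example, with `audio_tracks: 2` the output mapping could look like this (a sketch for an srt target, not a tested production setting):
```
-map 0:v -map 0:a:0 -map 0:a:1 -c:v libx264 -c:a aac -f mpegts 'srt://127.0.0.1:40077'
```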
For multiple video resolutions and multiple audio tracks, the parameters could look like:

View File

@ -2,11 +2,11 @@ ffplayout supports different types of outputs, let's explain them a bit:
## Stream
The streaming output can be used for any kind of classical streaming, such as **rtmp, srt, rtp**, etc. Any streaming type supported by ffmpeg should work.
The streaming output can be used for ever kind of classical streaming. For example for **rtmp, srt, rtp** etc. Any streaming type supported by ffmpeg should work.
**Remember that you need a streaming server as a destination if you want to use this mode.**
For example, you can use:
You can use for example:
- [SRS](https://github.com/ossrs/srs)
- [OvenMediaEngine](https://www.ovenmediaengine.com/ome)
@ -17,9 +17,9 @@ Of course, you can also use media platforms that support streaming input.
### Multiple Outputs:
ffplayout supports multiple outputs in such a way that it can send the same stream to multiple targets with different encoding settings.
ffplayout supports multiple outputs in a way, that it can output the same stream to multiple targets with different encoding settings.
For example, if you want to stream at different resolutions, you could apply these output parameters:
For example you want to stream different resolutions, you could apply this output parameters:
```YAML
...
@ -58,21 +58,21 @@ For example, if you want to stream at different resolutions, you could apply the
When you are using the text overlay filter, it will apply to all outputs.
The same applies to HLS output.
The same works to for HLS output.
If you want to use different resolutions, you should apply them in order from largest to smallest. Use the largest resolution in the config under `processing:` and the smaller ones in `output_params:`.
If you want to use different resolution, you should apply them in order from biggest to smallest. Use the biggest resolution in config under `processing:` and the smaller ones in `output_params:`.
## Desktop
In desktop mode, you will get your picture on the screen. For this, you need a desktop system; theoretically, all platforms should work here. ffplayout will require **ffplay** for that.
In desktop mode you will get your picture on screen. For this you need a desktop system, theoretical all platforms should work here. ffplayout will need for that **ffplay**.
## HLS
In this mode, you can output directly to an HLS playlist. The nice thing here is that ffplayout requires fewer resources than in streaming mode.
In this mode you can output directly to a hls playlist. The nice thing here is, that ffplayout need less resources then in streaming mode.
HLS output is currently the default, mostly because it works out of the box and doesn't need a streaming target. By default, it saves the segments to **/usr/share/ffplayout/public/live/**.
HLS output is currently the default, mostly because it works out of the box and don't need a streaming target. In default settings it saves the segments to **/usr/share/ffplayout/public/live/**.
**It is recommended to serve the HLS stream with nginx or another web server, and not with ffplayout (which is more meant for previewing).**
**It is recommend to serve the HLS stream with nginx or another web server, and not with ffpapi (which is more meant for previewing).**
**HLS multiple outputs example:**
@ -135,50 +135,6 @@ HLS output is currently the default, mostly because it works out of the box and
The use of **-filter_complex** and *mapping* is very limited, don't use it in situations other than for splitting the outputs.
## Tee Muxer:
#### Activating Output
The tee pseudo-muxer in FFmpeg is crucial in live streaming scenarios where a single input needs to be encoded once and then broadcast to multiple outputs in different formats or protocols. This feature significantly reduces computational overhead and improves efficiency—in my tests, it achieved a 200% reduction in CPU processing expenditure—by eliminating the need for multiple FFmpeg instances or re-encoding the same input multiple times for different outputs.
**FFmpeg's Tee Pseudo-Muxer Parameter Configuration:**
The configuration of the tee pseudo-muxer in FFmpeg allows for the broadcasting of a single input to multiple outputs simultaneously, each with specific settings. This is accomplished by specifying distinct formats and protocols for each output within a single command line, thus minimizing computational load by avoiding re-encoding for each target.
### Parameters and Syntax:
```shell
-c:v libx264
-crf 23
-x264-params keyint=50:min-keyint=25:scenecut=-1
-maxrate 1300k
-bufsize 2600k
-preset faster
-tune zerolatency
-profile:v Main
-level 3.1
-c:a aac
-ar 44100
-b:a 128k
-flags +cgop
-flags +global_header
-f tee
[f=flv:onfail=ignore]rtmp://127.0.0.1:1935/798e3a9e-47b5-4cd5-8079-76a20e03fee6.stream|[f=mpegts:onfail=ignore]udp://127.0.0.1:1234?pkt_size=1316|[f=hls:hls_time=6:hls_list_size=600:hls_flags=append_list+delete_segments+omit_endlist:hls_segment_filename=/usr/share/ffplayout/public/live/stream-%d.ts]/usr/share/ffplayout/public/live/stream.m3u8
```
**1. `-f tee`**: Specifies the use of the tee pseudo-muxer, which facilitates the multiplexing of the broadcast.
**2. Use of “|” (pipe)**: The pipe symbol "|" acts as a separator between the different outputs within the tee command. Each segment separated by a pipe configures a distinct output for the broadcast.
**3. Stream Processing by the Tee**:
- **First Output**: `[f=flv:onfail=ignore]rtmp://127.0.0.1:1935/798e3a9e-47b5-4cd5-8079-76a20e03fee6.stream`
- **f=flv**: Sets the output format to FLV (Flash Video).
- **onfail=ignore**: Directs FFmpeg to continue operating even if this output fails.
- **Second Output**: `[f=mpegts:onfail=ignore]udp://127.0.0.1:1234?pkt_size=1316`
- **f=mpegts**: Sets the output format to MPEG-TS (MPEG Transport Stream).
- **udp://...**: Uses the UDP protocol to send the stream with a specified packet size (`pkt_size=1316`).
- **Third Output**: `[f=hls:hls_time=6:hls_list_size=600:hls_flags=append_list+delete_segments+omit_endlist:hls_segment_filename=/usr/share/ffplayout/public/live/stream-%d.ts]/usr/share/ffplayout/public/live/stream.m3u8`
- **f=hls**: Sets the output format to HLS (HTTP Live Streaming).
Each stream is processed by the tee pseudo-muxer, which encodes the input only once, directing it to various outputs as specified, thereby allowing for efficient and less resource-intensive operation.
To use one of the outputs, you need to edit the **ffplayout.yml** config: under **out**, set your **mode** and use the different **output** options.

View File

@ -1,69 +0,0 @@
## Playlist Generation Template
It is possible to generate playlists based on templates. A template could look like:
```JSON
{
"sources": [
{
"start": "00:00:00",
"duration": "02:00:00",
"shuffle": true,
"paths": [
"/path/to/folder/1"
]
},
{
"start": "02:00:00",
"duration": "04:00:00",
"shuffle": false,
"paths": [
"/path/to/folder/2",
"/path/to/folder/3",
"/path/to/folder/4"
]
},
{
"start": "06:00:00",
"duration": "10:00:00",
"shuffle": true,
"paths": [
"/path/to/folder/5"
]
},
{
"start": "16:00:00",
"duration": "06:00:00",
"shuffle": false,
"paths": [
"/path/to/folder/6",
"/path/to/folder/7"
]
},
{
"start": "22:00:00",
"duration": "02:00:00",
"shuffle": true,
"paths": [
"/path/to/folder/8"
]
}
]
}
```
This can be used as a file and run through the CLI:
```BASH
ffplayout -g 2023-09-04 - 2023-09-10 --template 'path/to/playlist_template.json'
```
Or through API:
```BASH
curl -X POST http://127.0.0.1:8787/api/playlist/1/generate/2023-09-05
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
--data '{"template": {"sources": [\
{"start": "00:00:00", "duration": "10:00:00", "shuffle": true, "paths": ["path/1", "path/2"]}, \
{"start": "10:00:00", "duration": "14:00:00", "shuffle": false, "paths": ["path/3", "path/4"]}]}}'
```
View File
@ -1,12 +1,12 @@
### Preview Stream
When you are using the web frontend, you may wonder how to get a preview in the player. The default installation creates an HLS playlist, and the player uses this, but the HLS mode is not always utilized; instead, the stream output mode is activated.
So if you stream to an external server, you have different options to get a preview stream for your player. The simplest option would be to obtain an m3u8 playlist address from your external target, such as: https://example.org/live/stream.m3u8. You can use this in the configuration section of the frontend.
Another option (which has not been tested) is to add an HLS output option to your streaming parameters.
The next option is to install an RTMP server locally and create your preview stream there. In the following lines, this is described in more detail.
The ffplayout engine has no special preview config parameters, but you can add your settings to the **output_param**, like:
@ -29,11 +29,11 @@ The ffplayout engine has no special preview config parameters, but you can add y
...
```
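Building on the untested idea from above, an **output_param** could also let the tee muxer write a local HLS preview next to the main RTMP target; this is only a sketch, with a placeholder URL and placeholder paths:
```BASH
-c:v libx264 -c:a aac -ar 44100 -b:a 128k -flags +global_header \
-f tee \
[f=flv:onfail=ignore]rtmp://example.org/live/stream|[f=hls:hls_time=6:hls_list_size=600:hls_flags=append_list+delete_segments+omit_endlist:hls_segment_filename=/var/www/html/live/stream-%d.ts]/var/www/html/live/stream.m3u8
```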
In this documentation, we assume that you are using [SRS](https://github.com/ossrs/srs) at least for the preview stream. The most stable solution is previewing over HLS, but it is also possible to use [HTTP-FLV](https://github.com/ossrs/srs/wiki/v4_EN_DeliveryHttpStream) for lower latency.
To get this working, we need to follow some steps.
#### The first step is to compile and install SRS:
```BASH
# install some tool for compiling
@ -58,7 +58,7 @@ make install
```
Now we need a systemd service to start SRS automatically. Create the file:
**/etc/systemd/system/srs.service**
@ -134,11 +134,11 @@ vhost __defaultVhost__ {
```
Now you can enable and start SRS with: `systemctl enable --now srs` and check if it is running: `systemctl status srs`.
#### Configure Nginx
We assume that you have already installed Nginx and are using it for the frontend. Open the frontend config **/etc/nginx/sites-enabled/ffplayout.conf** and add a new location to it:
```NGINX
location /live/stream.flv {
@ -192,10 +192,10 @@ server {
}
```
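For the HLS variant, a matching location inside the same server block could simply proxy to SRS's built-in HTTP server; this is a hypothetical sketch that assumes SRS listens on its default HTTP port 8080:
```NGINX
location /live/ {
    # SRS http_server (default port 8080) serves stream.m3u8 and the segments
    proxy_pass http://127.0.0.1:8080/live/;
}
```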
Of course, in production, you should have an HTTPS directive as well, but this step is up to you.
Restart Nginx.
You can (re)start ffplayout, and when you have set everything up correctly, it should run without errors.
You can now go to your frontend configuration and change the `player_url` to: `http://[domain or IP]/live/stream.flv` or `http://[domain or IP]/live/stream.m3u8`. Save and reload the page. When you go to the player tab, you should see the preview video.
View File
@ -1,6 +1,5 @@
### Video from URL
Videos from a URL are videos that you can watch directly in your browser or download. For example:
```json
{
@ -11,8 +10,8 @@ Videos from a URL are videos that you can watch directly in your browser or down
}
```
This should work in general because most of the time it has duration information and is faster to play than a real live stream source. Avoid seeking, as it can take too much time.
**Live streams as input in playlists, such as RTMP, are not supported.**
Be careful with this; it's better to test it multiple times!
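One way to test such a source beforehand is to let ffprobe print the container duration; if no duration comes back, the playlist entry will most likely cause trouble. A sketch (the URL is a placeholder):
```BASH
ffprobe -v error -show_entries format=duration \
    -of default=noprint_wrappers=1 'https://example.org/path/file.mp4'
```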
View File
@ -1,10 +0,0 @@
### Stream Copy
ffplayout supports a stream copy mode; video and audio can be copied separately. This mode uses less CPU and RAM, but it has some drawbacks:
- All files must have exactly the same resolution, framerate, color depth, audio channel count, and sample rate.
- All files must use the same codecs and settings.
- The video and audio streams of a file must have the same length.
- The codecs and A/V settings must be supported by MPEG-TS and the output destination.
**This mode is experimental and will not have the same stability as the stream mode.**
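A quick way to verify that your files really match is to compare the relevant stream parameters with ffprobe; a sketch (paths and extension are placeholders):
```BASH
for f in /path/to/storage/*.mp4; do
    ffprobe -v error -show_entries \
        stream=codec_name,width,height,r_frame_rate,pix_fmt,sample_rate,channels \
        -of csv=p=0 "$f"
done
```
All files should print identical lines; any deviation is a candidate for the problems listed above.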
View File
@ -1,15 +0,0 @@
use static_files::NpmBuild;
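// In release builds with the `embed_frontend` feature enabled, build the
// frontend via npm and embed the generated static files into the binary;
// otherwise this build script is a no-op.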
fn main() -> std::io::Result<()> {
if !cfg!(debug_assertions) && cfg!(feature = "embed_frontend") {
NpmBuild::new("../frontend")
.install()?
.run("generate")?
.target("../frontend/.output/public")
.change_detection()
.to_resource_dir()
.build()
} else {
Ok(())
}
}
View File
@ -1,60 +0,0 @@
use log::*;
use std::io::Write;
use flexi_logger::writers::{FileLogWriter, LogWriter};
use flexi_logger::{Age, Cleanup, Criterion, DeferredNow, FileSpec, Logger, Naming};
pub fn file_logger() -> Box<dyn LogWriter> {
Box::new(
FileLogWriter::builder(
FileSpec::default()
.suppress_timestamp()
.directory("./logs")
.discriminant("1")
.basename("ffplayout"),
)
.append()
.format(file_formatter)
.rotate(
Criterion::Age(Age::Day),
Naming::TimestampsCustomFormat {
current_infix: Some(""),
format: "%Y-%m-%d",
},
Cleanup::KeepLogFiles(4),
)
.print_message()
.try_build()
.unwrap(),
)
}
fn file_formatter(
w: &mut dyn Write,
now: &mut DeferredNow,
record: &Record,
) -> std::io::Result<()> {
write!(
w,
"[{}] [{:>5}] {}",
now.now().format("%Y-%m-%d %H:%M:%S%.6f"),
record.level(),
record.args()
)
}
fn main() {
Logger::try_with_str("WARN")
.expect("LogSpecification String has errors")
.print_message()
.log_to_stderr()
.add_writer("Alert", file_logger())
.start()
.unwrap();
    error!(target: "{Alert,_Default}", "This is an error message");
    warn!(target: "{Alert,_Default}", "This is a warning");
    info!(target: "{Alert,_Default}", "This is an info message");
    debug!(target: "{Alert,_Default}", "This is a debug message");
    trace!(target: "{Alert,_Default}", "This is a trace message");
}
View File
@ -1,85 +0,0 @@
use flexi_logger::writers::{FileLogWriter, LogWriter};
use flexi_logger::{Age, Cleanup, Criterion, DeferredNow, FileSpec, Naming, Record};
use log::{debug, error, info, kv::Value, trace, warn};
use std::collections::HashMap;
use std::io;
use std::sync::{Arc, Mutex};
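/// A `LogWriter` that lazily creates one rotating file writer per channel
/// and dispatches every record according to its `channel` key-value.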
struct MultiFileLogger {
writers: Arc<Mutex<HashMap<String, Arc<Mutex<FileLogWriter>>>>>,
}
impl MultiFileLogger {
pub fn new() -> Self {
MultiFileLogger {
writers: Arc::new(Mutex::new(HashMap::new())),
}
}
fn get_writer(&self, channel: &str) -> io::Result<Arc<Mutex<FileLogWriter>>> {
let mut writers = self.writers.lock().unwrap();
if !writers.contains_key(channel) {
let writer = FileLogWriter::builder(
FileSpec::default()
.suppress_timestamp()
.basename("ffplayout"),
)
.append()
.rotate(
Criterion::Age(Age::Day),
Naming::TimestampsCustomFormat {
current_infix: Some(""),
format: "%Y-%m-%d",
},
Cleanup::KeepLogFiles(7),
)
.print_message()
.try_build()
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
writers.insert(channel.to_string(), Arc::new(Mutex::new(writer)));
}
Ok(writers.get(channel).unwrap().clone())
}
}
impl LogWriter for MultiFileLogger {
fn write(&self, now: &mut DeferredNow, record: &Record) -> io::Result<()> {
let channel = record
.key_values()
.get("channel".into())
.unwrap_or(Value::null())
.to_string();
        self.get_writer(&channel)?.lock().unwrap().write(now, record)
}
fn flush(&self) -> io::Result<()> {
let writers = self.writers.lock().unwrap();
for writer in writers.values() {
writer.lock().unwrap().flush()?;
}
Ok(())
}
}
fn main() {
let logger = MultiFileLogger::new();
flexi_logger::Logger::try_with_str("trace")
.expect("LogSpecification String has errors")
.print_message()
.add_writer("file", Box::new(logger))
.log_to_stderr()
.start()
.unwrap();
trace!(target: "{file}", channel = 1; "This is a trace message for file1");
trace!("This is a trace message for console");
debug!(target: "{file}", channel = 2; "This is a debug message for file2");
info!(target:"{file}", channel = 2; "This is an info message for file2");
warn!(target: "{file}", channel = 1; "This is a warning for file1");
error!(target: "{file}", channel = 2; "This is an error message for file2");
info!("This is a info message for console");
}
File diff suppressed because it is too large
View File
@ -1,552 +0,0 @@
use argon2::{
password_hash::{rand_core::OsRng, SaltString},
Argon2, PasswordHasher,
};
use rand::{distributions::Alphanumeric, Rng};
use sqlx::{sqlite::SqliteQueryResult, Pool, Row, Sqlite};
use tokio::task;
use super::models::{AdvancedConfiguration, Configuration};
use crate::db::models::{Channel, GlobalSettings, Role, TextPreset, User};
use crate::utils::{
advanced_config::AdvancedConfig, config::PlayoutConfig, is_running_in_container,
local_utc_offset,
};
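/// Run the bundled migrations; on a fresh database also insert the singleton
/// `global` row with a random secret and install a trigger that prevents a
/// second row from ever being created.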
pub async fn db_migrate(conn: &Pool<Sqlite>) -> Result<(), Box<dyn std::error::Error>> {
sqlx::migrate!("../migrations").run(conn).await?;
if select_global(conn).await.is_err() {
let secret: String = rand::thread_rng()
.sample_iter(&Alphanumeric)
.take(80)
.map(char::from)
.collect();
let shared = is_running_in_container().await;
let query = "CREATE TRIGGER global_row_count
BEFORE INSERT ON global
WHEN (SELECT COUNT(*) FROM global) >= 1
BEGIN
SELECT RAISE(FAIL, 'Database is already initialized!');
END;
INSERT INTO global(secret, shared) VALUES($1, $2);";
sqlx::query(query)
.bind(secret)
.bind(shared)
.execute(conn)
.await?;
}
Ok(())
}
pub async fn select_global(conn: &Pool<Sqlite>) -> Result<GlobalSettings, sqlx::Error> {
let query =
"SELECT id, secret, logs, playlists, public, storage, shared, mail_smtp, mail_user, mail_password, mail_starttls FROM global WHERE id = 1";
sqlx::query_as(query).fetch_one(conn).await
}
pub async fn update_global(
conn: &Pool<Sqlite>,
global: GlobalSettings,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "UPDATE global SET logs = $2, playlists = $3, public = $4, storage = $5,
mail_smtp = $6, mail_user = $7, mail_password = $8, mail_starttls = $9 WHERE id = 1";
sqlx::query(query)
.bind(global.id)
.bind(global.logs)
.bind(global.playlists)
.bind(global.public)
.bind(global.storage)
.bind(global.mail_smtp)
.bind(global.mail_user)
.bind(global.mail_password)
.bind(global.mail_starttls)
.execute(conn)
.await
}
pub async fn select_channel(conn: &Pool<Sqlite>, id: &i32) -> Result<Channel, sqlx::Error> {
let query = "SELECT * FROM channels WHERE id = $1";
let mut result: Channel = sqlx::query_as(query).bind(id).fetch_one(conn).await?;
result.utc_offset = local_utc_offset();
Ok(result)
}
pub async fn select_related_channels(
conn: &Pool<Sqlite>,
user_id: Option<i32>,
) -> Result<Vec<Channel>, sqlx::Error> {
let query = match user_id {
Some(id) => format!(
"SELECT c.id, c.name, c.preview_url, c.extra_extensions, c.active, c.public, c.playlists, c.storage, c.last_date, c.time_shift FROM channels c
left join user_channels uc on uc.channel_id = c.id
left join user u on u.id = uc.user_id
WHERE u.id = {id} ORDER BY c.id ASC;"
),
None => "SELECT * FROM channels ORDER BY id ASC;".to_string(),
};
let mut results: Vec<Channel> = sqlx::query_as(&query).fetch_all(conn).await?;
for result in results.iter_mut() {
result.utc_offset = local_utc_offset();
}
Ok(results)
}
pub async fn delete_user_channel(
conn: &Pool<Sqlite>,
user_id: i32,
channel_id: i32,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "DELETE FROM user_channels WHERE user_id = $1 AND channel_id = $2";
sqlx::query(query)
.bind(user_id)
.bind(channel_id)
.execute(conn)
.await
}
pub async fn update_channel(
conn: &Pool<Sqlite>,
id: i32,
channel: Channel,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query =
"UPDATE channels SET name = $2, preview_url = $3, extra_extensions = $4, public = $5, playlists = $6, storage = $7 WHERE id = $1";
sqlx::query(query)
.bind(id)
.bind(channel.name)
.bind(channel.preview_url)
.bind(channel.extra_extensions)
.bind(channel.public)
.bind(channel.playlists)
.bind(channel.storage)
.execute(conn)
.await
}
pub async fn update_stat(
conn: &Pool<Sqlite>,
id: i32,
last_date: String,
time_shift: f64,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "UPDATE channels SET last_date = $2, time_shift = $3 WHERE id = $1";
sqlx::query(query)
.bind(id)
.bind(last_date)
.bind(time_shift)
.execute(conn)
.await
}
pub async fn update_player(
conn: &Pool<Sqlite>,
id: i32,
active: bool,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "UPDATE channels SET active = $2 WHERE id = $1";
sqlx::query(query).bind(id).bind(active).execute(conn).await
}
pub async fn insert_channel(conn: &Pool<Sqlite>, channel: Channel) -> Result<Channel, sqlx::Error> {
let query = "INSERT INTO channels (name, preview_url, extra_extensions, public, playlists, storage) VALUES($1, $2, $3, $4, $5, $6)";
let result = sqlx::query(query)
.bind(channel.name)
.bind(channel.preview_url)
.bind(channel.extra_extensions)
.bind(channel.public)
.bind(channel.playlists)
.bind(channel.storage)
.execute(conn)
.await?;
sqlx::query_as("SELECT * FROM channels WHERE id = $1")
.bind(result.last_insert_rowid())
.fetch_one(conn)
.await
}
pub async fn delete_channel(
conn: &Pool<Sqlite>,
id: &i32,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "DELETE FROM channels WHERE id = $1";
sqlx::query(query).bind(id).execute(conn).await
}
pub async fn select_last_channel(conn: &Pool<Sqlite>) -> Result<i32, sqlx::Error> {
    let query = "SELECT seq FROM sqlite_sequence WHERE name = 'channels';";
sqlx::query_scalar(query).fetch_one(conn).await
}
pub async fn select_configuration(
conn: &Pool<Sqlite>,
channel: i32,
) -> Result<Configuration, sqlx::Error> {
let query = "SELECT * FROM configurations WHERE channel_id = $1";
sqlx::query_as(query).bind(channel).fetch_one(conn).await
}
pub async fn insert_configuration(
conn: &Pool<Sqlite>,
channel_id: i32,
output_param: String,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "INSERT INTO configurations (channel_id, output_param) VALUES($1, $2)";
sqlx::query(query)
.bind(channel_id)
.bind(output_param)
.execute(conn)
.await
}
pub async fn update_configuration(
conn: &Pool<Sqlite>,
id: i32,
config: PlayoutConfig,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "UPDATE configurations SET general_stop_threshold = $2, mail_subject = $3, mail_recipient = $4, mail_level = $5, mail_interval = $6, logging_ffmpeg_level = $7, logging_ingest_level = $8, logging_detect_silence = $9, logging_ignore = $10, processing_mode = $11, processing_audio_only = $12, processing_copy_audio = $13, processing_copy_video = $14, processing_width = $15, processing_height = $16, processing_aspect = $17, processing_fps = $18, processing_add_logo = $19, processing_logo = $20, processing_logo_scale = $21, processing_logo_opacity = $22, processing_logo_position = $23, processing_audio_tracks = $24, processing_audio_track_index = $25, processing_audio_channels = $26, processing_volume = $27, processing_filter = $28, processing_vtt_enable = $29, processing_vtt_dummy = $30, ingest_enable = $31, ingest_param = $32, ingest_filter = $33, playlist_day_start = $34, playlist_length = $35, playlist_infinit = $36, storage_filler = $37, storage_extensions = $38, storage_shuffle = $39, text_add = $40, text_from_filename = $41, text_font = $42, text_style = $43, text_regex = $44, task_enable = $45, task_path = $46, output_mode = $47, output_param = $48 WHERE id = $1";
sqlx::query(query)
.bind(id)
.bind(config.general.stop_threshold)
.bind(config.mail.subject)
.bind(config.mail.recipient)
.bind(config.mail.mail_level.as_str())
.bind(config.mail.interval)
.bind(config.logging.ffmpeg_level)
.bind(config.logging.ingest_level)
.bind(config.logging.detect_silence)
.bind(config.logging.ignore_lines.join(";"))
.bind(config.processing.mode.to_string())
.bind(config.processing.audio_only)
.bind(config.processing.copy_audio)
.bind(config.processing.copy_video)
.bind(config.processing.width)
.bind(config.processing.height)
.bind(config.processing.aspect)
.bind(config.processing.fps)
.bind(config.processing.add_logo)
.bind(config.processing.logo)
.bind(config.processing.logo_scale)
.bind(config.processing.logo_opacity)
.bind(config.processing.logo_position)
.bind(config.processing.audio_tracks)
.bind(config.processing.audio_track_index)
.bind(config.processing.audio_channels)
.bind(config.processing.volume)
.bind(config.processing.custom_filter)
.bind(config.processing.vtt_enable)
.bind(config.processing.vtt_dummy)
.bind(config.ingest.enable)
.bind(config.ingest.input_param)
.bind(config.ingest.custom_filter)
.bind(config.playlist.day_start)
.bind(config.playlist.length)
.bind(config.playlist.infinit)
.bind(config.storage.filler)
.bind(config.storage.extensions.join(";"))
.bind(config.storage.shuffle)
.bind(config.text.add_text)
.bind(config.text.text_from_filename)
.bind(config.text.font)
.bind(config.text.style)
.bind(config.text.regex)
.bind(config.task.enable)
.bind(config.task.path.to_string_lossy().to_string())
.bind(config.output.mode.to_string())
.bind(config.output.output_param)
.execute(conn)
.await
}
pub async fn insert_advanced_configuration(
conn: &Pool<Sqlite>,
channel_id: i32,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "INSERT INTO advanced_configurations (channel_id) VALUES($1)";
sqlx::query(query).bind(channel_id).execute(conn).await
}
pub async fn update_advanced_configuration(
conn: &Pool<Sqlite>,
channel_id: i32,
config: AdvancedConfig,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "UPDATE advanced_configurations SET decoder_input_param = $2, decoder_output_param = $3, encoder_input_param = $4, ingest_input_param = $5, filter_deinterlace = $6, filter_pad_scale_w = $7, filter_pad_scale_h = $8, filter_pad_video = $9, filter_fps = $10, filter_scale = $11, filter_set_dar = $12, filter_fade_in = $13, filter_fade_out = $14, filter_overlay_logo_scale = $15, filter_overlay_logo_fade_in = $16, filter_overlay_logo_fade_out = $17, filter_overlay_logo = $18, filter_tpad = $19, filter_drawtext_from_file = $20, filter_drawtext_from_zmq = $21, filter_aevalsrc = $22, filter_afade_in = $23, filter_afade_out = $24, filter_apad = $25, filter_volume = $26, filter_split = $27 WHERE channel_id = $1";
sqlx::query(query)
.bind(channel_id)
.bind(config.decoder.input_param)
.bind(config.decoder.output_param)
.bind(config.encoder.input_param)
.bind(config.ingest.input_param)
.bind(config.filter.deinterlace)
.bind(config.filter.pad_scale_w)
.bind(config.filter.pad_scale_h)
.bind(config.filter.pad_video)
.bind(config.filter.fps)
.bind(config.filter.scale)
.bind(config.filter.set_dar)
.bind(config.filter.fade_in)
.bind(config.filter.fade_out)
.bind(config.filter.overlay_logo_scale)
.bind(config.filter.overlay_logo_fade_in)
.bind(config.filter.overlay_logo_fade_out)
.bind(config.filter.overlay_logo)
.bind(config.filter.tpad)
.bind(config.filter.drawtext_from_file)
.bind(config.filter.drawtext_from_zmq)
.bind(config.filter.aevalsrc)
.bind(config.filter.afade_in)
.bind(config.filter.afade_out)
.bind(config.filter.apad)
.bind(config.filter.volume)
.bind(config.filter.split)
.execute(conn)
.await
}
pub async fn select_advanced_configuration(
conn: &Pool<Sqlite>,
channel: i32,
) -> Result<AdvancedConfiguration, sqlx::Error> {
let query = "SELECT * FROM advanced_configurations WHERE channel_id = $1";
sqlx::query_as(query).bind(channel).fetch_one(conn).await
}
pub async fn select_role(conn: &Pool<Sqlite>, id: &i32) -> Result<Role, sqlx::Error> {
let query = "SELECT name FROM roles WHERE id = $1";
let result: Role = sqlx::query_as(query).bind(id).fetch_one(conn).await?;
Ok(result)
}
pub async fn select_login(conn: &Pool<Sqlite>, user: &str) -> Result<User, sqlx::Error> {
let query =
"SELECT u.id, u.mail, u.username, u.password, u.role_id, group_concat(uc.channel_id, ',') as channel_ids FROM user u
left join user_channels uc on uc.user_id = u.id
WHERE u.username = $1";
sqlx::query_as(query).bind(user).fetch_one(conn).await
}
pub async fn select_user(conn: &Pool<Sqlite>, id: i32) -> Result<User, sqlx::Error> {
let query = "SELECT u.id, u.mail, u.username, u.role_id, group_concat(uc.channel_id, ',') as channel_ids FROM user u
left join user_channels uc on uc.user_id = u.id
WHERE u.id = $1";
sqlx::query_as(query).bind(id).fetch_one(conn).await
}
pub async fn select_global_admins(conn: &Pool<Sqlite>) -> Result<Vec<User>, sqlx::Error> {
let query = "SELECT u.id, u.mail, u.username, u.role_id, group_concat(uc.channel_id, ',') as channel_ids FROM user u
left join user_channels uc on uc.user_id = u.id
WHERE u.role_id = 1";
sqlx::query_as(query).fetch_all(conn).await
}
pub async fn select_users(conn: &Pool<Sqlite>) -> Result<Vec<User>, sqlx::Error> {
let query = "SELECT id, username FROM user";
sqlx::query_as(query).fetch_all(conn).await
}
pub async fn insert_user(conn: &Pool<Sqlite>, user: User) -> Result<(), sqlx::Error> {
let password_hash = task::spawn_blocking(move || {
let salt = SaltString::generate(&mut OsRng);
let hash = Argon2::default()
.hash_password(user.password.clone().as_bytes(), &salt)
.unwrap();
hash.to_string()
})
.await
.unwrap();
let query =
"INSERT INTO user (mail, username, password, role_id) VALUES($1, $2, $3, $4) RETURNING id";
let user_id: i32 = sqlx::query(query)
.bind(user.mail)
.bind(user.username)
.bind(password_hash)
.bind(user.role_id)
.fetch_one(conn)
.await?
.get("id");
if let Some(channel_ids) = user.channel_ids {
insert_user_channel(conn, user_id, channel_ids).await?;
}
Ok(())
}
pub async fn insert_or_update_user(conn: &Pool<Sqlite>, user: User) -> Result<(), sqlx::Error> {
let password_hash = task::spawn_blocking(move || {
let salt = SaltString::generate(&mut OsRng);
let hash = Argon2::default()
.hash_password(user.password.clone().as_bytes(), &salt)
.unwrap();
hash.to_string()
})
.await
.unwrap();
let query = "INSERT INTO user (mail, username, password, role_id) VALUES($1, $2, $3, $4)
ON CONFLICT(username) DO UPDATE SET
mail = excluded.mail, username = excluded.username, password = excluded.password, role_id = excluded.role_id
RETURNING id";
let user_id: i32 = sqlx::query(query)
.bind(user.mail)
.bind(user.username)
.bind(password_hash)
.bind(user.role_id)
.fetch_one(conn)
.await?
.get("id");
if let Some(channel_ids) = user.channel_ids {
insert_user_channel(conn, user_id, channel_ids).await?;
}
Ok(())
}
pub async fn update_user(
conn: &Pool<Sqlite>,
id: i32,
fields: String,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = format!("UPDATE user SET {fields} WHERE id = $1");
sqlx::query(&query).bind(id).execute(conn).await
}
pub async fn insert_user_channel(
conn: &Pool<Sqlite>,
user_id: i32,
channel_ids: Vec<i32>,
) -> Result<(), sqlx::Error> {
for channel in &channel_ids {
let query = "INSERT OR IGNORE INTO user_channels (channel_id, user_id) VALUES ($1, $2);";
sqlx::query(query)
.bind(channel)
.bind(user_id)
.execute(conn)
.await?;
}
Ok(())
}
pub async fn delete_user(conn: &Pool<Sqlite>, id: i32) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "DELETE FROM user WHERE id = $1;";
sqlx::query(query).bind(id).execute(conn).await
}
pub async fn select_presets(conn: &Pool<Sqlite>, id: i32) -> Result<Vec<TextPreset>, sqlx::Error> {
let query = "SELECT * FROM presets WHERE channel_id = $1";
sqlx::query_as(query).bind(id).fetch_all(conn).await
}
pub async fn update_preset(
conn: &Pool<Sqlite>,
id: &i32,
preset: TextPreset,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query =
"UPDATE presets SET name = $1, text = $2, x = $3, y = $4, fontsize = $5, line_spacing = $6,
fontcolor = $7, alpha = $8, box = $9, boxcolor = $10, boxborderw = $11 WHERE id = $12";
sqlx::query(query)
.bind(preset.name)
.bind(preset.text)
.bind(preset.x)
.bind(preset.y)
.bind(preset.fontsize)
.bind(preset.line_spacing)
.bind(preset.fontcolor)
.bind(preset.alpha)
.bind(preset.r#box)
.bind(preset.boxcolor)
.bind(preset.boxborderw)
.bind(id)
.execute(conn)
.await
}
pub async fn insert_preset(
conn: &Pool<Sqlite>,
preset: TextPreset,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query =
"INSERT INTO presets (channel_id, name, text, x, y, fontsize, line_spacing, fontcolor, alpha, box, boxcolor, boxborderw)
VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)";
sqlx::query(query)
.bind(preset.channel_id)
.bind(preset.name)
.bind(preset.text)
.bind(preset.x)
.bind(preset.y)
.bind(preset.fontsize)
.bind(preset.line_spacing)
.bind(preset.fontcolor)
.bind(preset.alpha)
.bind(preset.r#box)
.bind(preset.boxcolor)
.bind(preset.boxborderw)
.execute(conn)
.await
}
pub async fn new_channel_presets(
conn: &Pool<Sqlite>,
channel_id: i32,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "INSERT INTO presets (name, text, x, y, fontsize, line_spacing, fontcolor, box, boxcolor, boxborderw, alpha, channel_id)
VALUES ('Default', 'Welcome to ffplayout messenger!', '(w-text_w)/2', '(h-text_h)/2', '24', '4', '#ffffff@0xff', '0', '#000000@0x80', '4', '1.0', $1),
('Empty Text', '', '0', '0', '24', '4', '#000000', '0', '#000000', '0', '0', $1),
('Bottom Text fade in', 'The upcoming event will be delayed by a few minutes.', '(w-text_w)/2', '(h-line_h)*0.9', '24', '4', '#ffffff', '1', '#000000@0x80', '4', 'ifnot(ld(1),st(1,t));if(lt(t,ld(1)+1),0,if(lt(t,ld(1)+2),(t-(ld(1)+1))/1,if(lt(t,ld(1)+8),1,if(lt(t,ld(1)+9),(1-(t-(ld(1)+8)))/1,0))))', $1),
('Scrolling Text', 'We have a very important announcement to make.', 'ifnot(ld(1),st(1,t));if(lt(t,ld(1)+1),w+4,w-w/12*mod(t-ld(1),12*(w+tw)/w))', '(h-line_h)*0.9', '24', '4', '#ffffff', '1', '#000000@0x80', '4', '1.0', $1);";
sqlx::query(query).bind(channel_id).execute(conn).await
}
pub async fn delete_preset(
conn: &Pool<Sqlite>,
id: &i32,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "DELETE FROM presets WHERE id = $1;";
sqlx::query(query).bind(id).execute(conn).await
}
View File
@ -1,40 +0,0 @@
use std::io::{stdin, stdout, Write};
use sqlx::{migrate::MigrateDatabase, Pool, Sqlite, SqlitePool};
pub mod handles;
pub mod models;
use crate::utils::db_path;
pub async fn db_pool() -> Result<Pool<Sqlite>, sqlx::Error> {
let db_path = db_path().unwrap();
if !Sqlite::database_exists(db_path).await.unwrap_or(false) {
Sqlite::create_database(db_path).await.unwrap();
}
let conn = SqlitePool::connect(db_path).await?;
Ok(conn)
}
pub async fn db_drop() {
let mut drop_answer = String::new();
print!("Drop Database [Y/n]: ");
stdout().flush().unwrap();
stdin()
.read_line(&mut drop_answer)
.expect("Did not enter a yes or no?");
let drop = drop_answer.trim().to_lowercase().starts_with('y');
if drop {
match Sqlite::drop_database(db_path().unwrap()).await {
Ok(_) => println!("Successfully dropped DB"),
Err(e) => eprintln!("{e}"),
};
};
}
View File
@ -1,434 +0,0 @@
use std::{error::Error, fmt, str::FromStr};
use once_cell::sync::OnceCell;
use regex::Regex;
use serde::{
de::{self, Visitor},
Deserialize, Serialize,
};
// use serde_with::{formats::CommaSeparator, serde_as, StringWithSeparator};
use sqlx::{sqlite::SqliteRow, FromRow, Pool, Row, Sqlite};
use crate::db::handles;
use crate::utils::config::PlayoutConfig;
#[derive(Clone, Default, Debug, Deserialize, Serialize, sqlx::FromRow)]
pub struct GlobalSettings {
pub id: i32,
pub secret: Option<String>,
pub logs: String,
pub playlists: String,
pub public: String,
pub storage: String,
pub shared: bool,
pub mail_smtp: String,
pub mail_user: String,
pub mail_password: String,
pub mail_starttls: bool,
}
impl GlobalSettings {
pub async fn new(conn: &Pool<Sqlite>) -> Self {
let global_settings = handles::select_global(conn);
match global_settings.await {
Ok(g) => g,
Err(_) => GlobalSettings {
id: 0,
secret: None,
logs: String::new(),
playlists: String::new(),
public: String::new(),
storage: String::new(),
shared: false,
mail_smtp: String::new(),
mail_user: String::new(),
mail_password: String::new(),
mail_starttls: false,
},
}
}
pub fn global() -> &'static GlobalSettings {
INSTANCE.get().expect("Config is not initialized")
}
}
static INSTANCE: OnceCell<GlobalSettings> = OnceCell::new();
pub async fn init_globales(conn: &Pool<Sqlite>) {
let config = GlobalSettings::new(conn).await;
INSTANCE.set(config).unwrap();
}
#[derive(Clone, Debug, Default, Deserialize, Serialize, sqlx::FromRow)]
pub struct Channel {
#[serde(default = "default_id", skip_deserializing)]
pub id: i32,
pub name: String,
pub preview_url: String,
pub extra_extensions: String,
pub active: bool,
pub public: String,
pub playlists: String,
pub storage: String,
pub last_date: Option<String>,
pub time_shift: f64,
#[sqlx(default)]
#[serde(default)]
pub utc_offset: i32,
}
fn default_id() -> i32 {
1
}
// #[serde_as]
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct User {
#[serde(skip_deserializing)]
pub id: i32,
#[serde(skip_serializing_if = "Option::is_none")]
pub mail: Option<String>,
pub username: String,
#[serde(skip_serializing, default = "empty_string")]
pub password: String,
pub role_id: Option<i32>,
// #[serde_as(as = "StringWithSeparator::<CommaSeparator, i32>")]
pub channel_ids: Option<Vec<i32>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub token: Option<String>,
}
impl FromRow<'_, SqliteRow> for User {
fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
Ok(Self {
id: row.try_get("id").unwrap_or_default(),
mail: row.try_get("mail").unwrap_or_default(),
username: row.try_get("username").unwrap_or_default(),
password: row.try_get("password").unwrap_or_default(),
role_id: row.try_get("role_id").unwrap_or_default(),
channel_ids: Some(
row.try_get::<String, &str>("channel_ids")
.unwrap_or_default()
.split(',')
.map(|i| i.parse::<i32>().unwrap_or_default())
.collect(),
),
token: None,
})
}
}
fn empty_string() -> String {
"".to_string()
}
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct UserMeta {
pub id: i32,
pub channels: Vec<i32>,
}
impl UserMeta {
pub fn new(id: i32, channels: Vec<i32>) -> Self {
Self { id, channels }
}
}
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub enum Role {
GlobalAdmin,
ChannelAdmin,
User,
Guest,
}
impl Role {
pub fn set_role(role: &str) -> Self {
match role {
"global_admin" => Role::GlobalAdmin,
"channel_admin" => Role::ChannelAdmin,
"user" => Role::User,
_ => Role::Guest,
}
}
}
impl FromStr for Role {
type Err = String;
fn from_str(input: &str) -> Result<Self, Self::Err> {
match input {
"global_admin" => Ok(Self::GlobalAdmin),
"channel_admin" => Ok(Self::ChannelAdmin),
"user" => Ok(Self::User),
_ => Ok(Self::Guest),
}
}
}
impl fmt::Display for Role {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Self::GlobalAdmin => write!(f, "global_admin"),
Self::ChannelAdmin => write!(f, "channel_admin"),
Self::User => write!(f, "user"),
Self::Guest => write!(f, "guest"),
}
}
}
impl<'r> sqlx::decode::Decode<'r, ::sqlx::Sqlite> for Role
where
&'r str: sqlx::decode::Decode<'r, sqlx::Sqlite>,
{
fn decode(
value: sqlx::sqlite::SqliteValueRef<'r>,
) -> Result<Role, Box<dyn Error + 'static + Send + Sync>> {
let value = <&str as sqlx::decode::Decode<sqlx::Sqlite>>::decode(value)?;
Ok(value.parse()?)
}
}
impl FromRow<'_, SqliteRow> for Role {
fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
match row.get("name") {
"global_admin" => Ok(Self::GlobalAdmin),
"channel_admin" => Ok(Self::ChannelAdmin),
"user" => Ok(Self::User),
_ => Ok(Self::Guest),
}
}
}
#[derive(Debug, Deserialize, Serialize, Clone, sqlx::FromRow)]
pub struct TextPreset {
#[sqlx(default)]
#[serde(skip_deserializing)]
pub id: i32,
pub channel_id: i32,
pub name: String,
pub text: String,
pub x: String,
pub y: String,
#[serde(deserialize_with = "deserialize_number_or_string")]
pub fontsize: String,
#[serde(deserialize_with = "deserialize_number_or_string")]
pub line_spacing: String,
pub fontcolor: String,
pub r#box: String,
pub boxcolor: String,
#[serde(deserialize_with = "deserialize_number_or_string")]
pub boxborderw: String,
#[serde(deserialize_with = "deserialize_number_or_string")]
pub alpha: String,
}
/// Deserialize number or string
pub fn deserialize_number_or_string<'de, D>(deserializer: D) -> Result<String, D::Error>
where
D: serde::Deserializer<'de>,
{
struct StringOrNumberVisitor;
impl<'de> Visitor<'de> for StringOrNumberVisitor {
type Value = String;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a string or a number")
}
fn visit_str<E: de::Error>(self, value: &str) -> Result<Self::Value, E> {
let re = Regex::new(r"0,([0-9]+)").unwrap();
let clean_string = re.replace_all(value, "0.$1").to_string();
Ok(clean_string)
}
fn visit_u64<E: de::Error>(self, value: u64) -> Result<Self::Value, E> {
Ok(value.to_string())
}
fn visit_i64<E: de::Error>(self, value: i64) -> Result<Self::Value, E> {
Ok(value.to_string())
}
fn visit_f64<E: de::Error>(self, value: f64) -> Result<Self::Value, E> {
Ok(value.to_string())
}
}
deserializer.deserialize_any(StringOrNumberVisitor)
}
#[derive(Clone, Debug, Deserialize, Serialize, sqlx::FromRow)]
pub struct Configuration {
pub id: i32,
pub channel_id: i32,
pub general_stop_threshold: f64,
pub mail_subject: String,
pub mail_recipient: String,
pub mail_level: String,
pub mail_interval: i64,
pub logging_ffmpeg_level: String,
pub logging_ingest_level: String,
pub logging_detect_silence: bool,
#[serde(default)]
pub logging_ignore: String,
pub processing_mode: String,
pub processing_audio_only: bool,
pub processing_copy_audio: bool,
pub processing_copy_video: bool,
pub processing_width: i64,
pub processing_height: i64,
pub processing_aspect: f64,
pub processing_fps: f64,
pub processing_add_logo: bool,
pub processing_logo: String,
pub processing_logo_scale: String,
pub processing_logo_opacity: f64,
pub processing_logo_position: String,
#[serde(default = "default_tracks")]
pub processing_audio_tracks: i32,
#[serde(default = "default_track_index")]
pub processing_audio_track_index: i32,
#[serde(default = "default_channels")]
pub processing_audio_channels: u8,
pub processing_volume: f64,
#[serde(default)]
pub processing_filter: String,
#[serde(default)]
pub processing_vtt_enable: bool,
#[serde(default)]
pub processing_vtt_dummy: Option<String>,
pub ingest_enable: bool,
pub ingest_param: String,
#[serde(default)]
pub ingest_filter: String,
pub playlist_day_start: String,
pub playlist_length: String,
pub playlist_infinit: bool,
pub storage_filler: String,
pub storage_extensions: String,
pub storage_shuffle: bool,
pub text_add: bool,
pub text_from_filename: bool,
pub text_font: String,
pub text_style: String,
pub text_regex: String,
pub task_enable: bool,
pub task_path: String,
pub output_mode: String,
pub output_param: String,
}
impl Configuration {
pub fn from(id: i32, channel_id: i32, config: PlayoutConfig) -> Self {
Self {
id,
channel_id,
general_stop_threshold: config.general.stop_threshold,
mail_subject: config.mail.subject,
mail_recipient: config.mail.recipient,
mail_level: config.mail.mail_level.to_string(),
mail_interval: config.mail.interval,
logging_ffmpeg_level: config.logging.ffmpeg_level,
logging_ingest_level: config.logging.ingest_level,
logging_detect_silence: config.logging.detect_silence,
logging_ignore: config.logging.ignore_lines.join(";"),
processing_mode: config.processing.mode.to_string(),
processing_audio_only: config.processing.audio_only,
processing_audio_track_index: config.processing.audio_track_index,
processing_copy_audio: config.processing.copy_audio,
processing_copy_video: config.processing.copy_video,
processing_width: config.processing.width,
processing_height: config.processing.height,
processing_aspect: config.processing.aspect,
processing_fps: config.processing.fps,
processing_add_logo: config.processing.add_logo,
processing_logo: config.processing.logo,
processing_logo_scale: config.processing.logo_scale,
processing_logo_opacity: config.processing.logo_opacity,
processing_logo_position: config.processing.logo_position,
processing_audio_tracks: config.processing.audio_tracks,
processing_audio_channels: config.processing.audio_channels,
processing_volume: config.processing.volume,
processing_filter: config.processing.custom_filter,
processing_vtt_enable: config.processing.vtt_enable,
processing_vtt_dummy: config.processing.vtt_dummy,
ingest_enable: config.ingest.enable,
ingest_param: config.ingest.input_param,
ingest_filter: config.ingest.custom_filter,
playlist_day_start: config.playlist.day_start,
playlist_length: config.playlist.length,
playlist_infinit: config.playlist.infinit,
storage_filler: config.storage.filler,
storage_extensions: config.storage.extensions.join(";"),
storage_shuffle: config.storage.shuffle,
text_add: config.text.add_text,
text_font: config.text.font,
text_from_filename: config.text.text_from_filename,
text_style: config.text.style,
text_regex: config.text.regex,
task_enable: config.task.enable,
task_path: config.task.path.to_string_lossy().to_string(),
output_mode: config.output.mode.to_string(),
output_param: config.output.output_param,
}
}
}
fn default_track_index() -> i32 {
-1
}
fn default_tracks() -> i32 {
1
}
fn default_channels() -> u8 {
2
}
#[derive(Clone, Debug, Deserialize, Serialize, sqlx::FromRow)]
pub struct AdvancedConfiguration {
pub id: i32,
pub channel_id: i32,
pub decoder_input_param: Option<String>,
pub decoder_output_param: Option<String>,
pub encoder_input_param: Option<String>,
pub ingest_input_param: Option<String>,
pub filter_deinterlace: Option<String>,
pub filter_pad_scale_w: Option<String>,
pub filter_pad_scale_h: Option<String>,
pub filter_pad_video: Option<String>,
pub filter_fps: Option<String>,
pub filter_scale: Option<String>,
pub filter_set_dar: Option<String>,
pub filter_fade_in: Option<String>,
pub filter_fade_out: Option<String>,
pub filter_overlay_logo_scale: Option<String>,
pub filter_overlay_logo_fade_in: Option<String>,
pub filter_overlay_logo_fade_out: Option<String>,
pub filter_overlay_logo: Option<String>,
pub filter_tpad: Option<String>,
pub filter_drawtext_from_file: Option<String>,
pub filter_drawtext_from_zmq: Option<String>,
pub filter_aevalsrc: Option<String>,
pub filter_afade_in: Option<String>,
pub filter_afade_out: Option<String>,
pub filter_apad: Option<String>,
pub filter_volume: Option<String>,
pub filter_split: Option<String>,
}
View File
@ -1,47 +0,0 @@
use std::sync::{Arc, Mutex};
use actix_web::{dev::ServiceRequest, Error, HttpMessage};
use actix_web_grants::authorities::AttachAuthorities;
use actix_web_httpauth::extractors::bearer::BearerAuth;
use clap::Parser;
use lazy_static::lazy_static;
use sysinfo::{Disks, Networks, System};
pub mod api;
pub mod db;
pub mod macros;
pub mod player;
pub mod sse;
pub mod utils;
use api::auth;
use db::models::UserMeta;
use utils::advanced_config::AdvancedConfig;
use utils::args_parse::Args;
lazy_static! {
pub static ref ARGS: Args = Args::parse();
pub static ref DISKS: Arc<Mutex<Disks>> =
Arc::new(Mutex::new(Disks::new_with_refreshed_list()));
pub static ref NETWORKS: Arc<Mutex<Networks>> =
Arc::new(Mutex::new(Networks::new_with_refreshed_list()));
pub static ref SYS: Arc<Mutex<System>> = Arc::new(Mutex::new(System::new_all()));
}
pub async fn validator(
req: ServiceRequest,
credentials: BearerAuth,
) -> Result<ServiceRequest, (Error, ServiceRequest)> {
// We just get permissions from JWT
match auth::decode_jwt(credentials.token()).await {
Ok(claims) => {
req.attach(vec![claims.role]);
req.extensions_mut()
.insert(UserMeta::new(claims.id, claims.channels));
Ok(req)
}
Err(e) => Err((e, req)),
}
}
View File
@ -1,286 +0,0 @@
use std::{
collections::HashSet,
fs::File,
io,
process::exit,
sync::{atomic::AtomicBool, Arc, Mutex},
thread,
};
use actix_web::{middleware::Logger, web, App, HttpServer};
use actix_web_httpauth::middleware::HttpAuthentication;
#[cfg(any(debug_assertions, not(feature = "embed_frontend")))]
use actix_files::Files;
#[cfg(all(not(debug_assertions), feature = "embed_frontend"))]
use actix_web_static_files::ResourceFiles;
use log::*;
use ffplayout::{
api::routes::*,
db::{db_drop, db_pool, handles, models::init_globales},
player::{
controller::{ChannelController, ChannelManager},
utils::{get_date, is_remote, json_validate::validate_playlist, JsonPlaylist},
},
sse::{broadcast::Broadcaster, routes::*, SseAuthState},
utils::{
args_parse::run_args,
config::get_config,
logging::{init_logging, MailQueue},
playlist::generate_playlist,
},
validator, ARGS,
};
#[cfg(any(debug_assertions, not(feature = "embed_frontend")))]
use ffplayout::utils::public_path;
#[cfg(all(not(debug_assertions), feature = "embed_frontend"))]
include!(concat!(env!("OUT_DIR"), "/generated.rs"));
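/// Use half of the available CPU threads for the HTTP server workers, but at least two.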
fn thread_counter() -> usize {
let available_threads = thread::available_parallelism()
.map(|n| n.get())
.unwrap_or(1);
(available_threads / 2).max(2)
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
let mail_queues = Arc::new(Mutex::new(vec![]));
let pool = db_pool()
.await
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
if let Err(c) = run_args(&pool).await {
exit(c);
}
init_globales(&pool).await;
init_logging(mail_queues.clone())?;
let channel_controllers = Arc::new(Mutex::new(ChannelController::new()));
if let Some(conn) = &ARGS.listen {
let channels = handles::select_related_channels(&pool, None)
.await
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
for channel in channels.iter() {
let config = get_config(&pool, channel.id)
.await
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
let manager = ChannelManager::new(Some(pool.clone()), channel.clone(), config.clone());
let m_queue = Arc::new(Mutex::new(MailQueue::new(channel.id, config.mail)));
channel_controllers
.lock()
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?
.add(manager.clone());
if let Ok(mut mqs) = mail_queues.lock() {
mqs.push(m_queue.clone());
}
if channel.active {
manager.async_start().await;
}
}
let ip_port = conn.split(':').collect::<Vec<&str>>();
let addr = ip_port[0];
let port = ip_port
.get(1)
.and_then(|p| p.parse::<u16>().ok())
.ok_or(io::Error::new(
io::ErrorKind::InvalidInput,
"<ADRESSE>:<PORT> needed! For example: 127.0.0.1:8787",
))?;
let controllers = web::Data::from(channel_controllers.clone());
let auth_state = web::Data::new(SseAuthState {
uuids: tokio::sync::Mutex::new(HashSet::new()),
});
let broadcast_data = Broadcaster::create();
let thread_count = thread_counter();
info!("Running ffplayout API, listen on http://{conn}");
let db_clone = pool.clone();
// no 'allow origin' here, give it to the reverse proxy
HttpServer::new(move || {
let queues = mail_queues.clone();
let auth = HttpAuthentication::bearer(validator);
let db_pool = web::Data::new(db_clone.clone());
// Customize logging format to get IP though proxies.
let logger = Logger::new("%{r}a \"%r\" %s %b \"%{Referer}i\" \"%{User-Agent}i\" %T")
.exclude_regex(r"/_nuxt/*");
let mut web_app = App::new()
.app_data(db_pool)
.app_data(web::Data::from(queues))
.app_data(controllers.clone())
.app_data(auth_state.clone())
.app_data(web::Data::from(Arc::clone(&broadcast_data)))
.wrap(logger)
.service(login)
.service(
web::scope("/api")
.wrap(auth.clone())
.service(add_user)
.service(get_user)
.service(get_by_name)
.service(get_users)
.service(remove_user)
.service(get_advanced_config)
.service(update_advanced_config)
.service(get_playout_config)
.service(update_playout_config)
.service(add_preset)
.service(get_presets)
.service(update_preset)
.service(delete_preset)
.service(get_channel)
.service(get_all_channels)
.service(patch_channel)
.service(add_channel)
.service(remove_channel)
.service(update_user)
.service(send_text_message)
.service(control_playout)
.service(media_current)
.service(process_control)
.service(get_playlist)
.service(save_playlist)
.service(gen_playlist)
.service(del_playlist)
.service(get_log)
.service(file_browser)
.service(add_dir)
.service(move_rename)
.service(remove)
.service(save_file)
.service(import_playlist)
.service(get_program)
.service(get_system_stat)
.service(generate_uuid),
)
.service(
web::scope("/data")
.service(validate_uuid)
.service(event_stream),
)
.service(get_file)
.service(get_public);
#[cfg(all(not(debug_assertions), feature = "embed_frontend"))]
{
// in release mode embed frontend
let generated = generate();
web_app =
web_app.service(ResourceFiles::new("/", generated).resolve_not_found_to_root());
}
#[cfg(any(debug_assertions, not(feature = "embed_frontend")))]
{
// in debug mode get frontend from path
web_app = web_app.service(Files::new("/", public_path()).index_file("index.html"));
}
web_app
})
.bind((addr, port))?
.workers(thread_count)
.run()
.await?;
} else if ARGS.drop_db {
db_drop().await;
} else {
let channels = ARGS.channels.clone().unwrap_or_else(|| vec![1]);
for (index, channel_id) in channels.iter().enumerate() {
let config = match get_config(&pool, *channel_id).await {
Ok(c) => c,
Err(e) => {
eprint!("No config found, channel may not exists!\nOriginal error message: ");
return Err(io::Error::new(io::ErrorKind::Other, e.to_string()));
}
};
let channel = handles::select_channel(&pool, channel_id)
.await
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
let manager = ChannelManager::new(Some(pool.clone()), channel.clone(), config.clone());
if ARGS.foreground {
if ARGS.channels.is_none() {
error!(
"Foreground mode needs at least 1 channel, run with `--channels (1 2 ...)`"
);
exit(1);
}
let m_queue = Arc::new(Mutex::new(MailQueue::new(*channel_id, config.mail)));
channel_controllers
.lock()
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?
.add(manager.clone());
if let Ok(mut mqs) = mail_queues.lock() {
mqs.push(m_queue.clone());
}
manager.foreground_start(index).await;
} else if ARGS.generate.is_some() {
// run a simple playlist generator and save them to disk
if let Err(e) = generate_playlist(manager) {
error!("{e}");
exit(1);
};
} else if ARGS.validate {
let mut playlist_path = config.channel.playlists.clone();
let start_sec = config.playlist.start_sec.unwrap();
let date = get_date(false, start_sec, false);
if playlist_path.is_dir() || is_remote(&playlist_path.to_string_lossy()) {
let d: Vec<&str> = date.split('-').collect();
playlist_path = playlist_path
.join(d[0])
.join(d[1])
.join(date.clone())
.with_extension("json");
}
let f = File::options()
.read(true)
.write(false)
.open(&playlist_path)?;
let playlist: JsonPlaylist = serde_json::from_reader(f)?;
validate_playlist(
config,
Arc::new(Mutex::new(Vec::new())),
playlist,
Arc::new(AtomicBool::new(false)),
);
} else if !ARGS.init {
error!("Run ffplayout with parameters! Run ffplayout -h for more information.");
}
}
}
for channel_ctl in &channel_controllers.lock().unwrap().channels {
channel_ctl.channel.lock().unwrap().active = false;
channel_ctl.stop_all();
}
pool.close().await;
Ok(())
}
View File
@ -1,431 +0,0 @@
use std::{
fmt, fs,
io::{self, Read},
path::Path,
process::Child,
sync::{
atomic::{AtomicBool, AtomicUsize, Ordering},
Arc, Mutex,
},
thread,
time::Duration,
};
use actix_web::web;
use log::*;
use m3u8_rs::Playlist;
use serde::{Deserialize, Serialize};
use sqlx::{Pool, Sqlite};
use walkdir::WalkDir;
use crate::player::{
output::{player, write_hls},
utils::{folder::fill_filler_list, Media},
};
use crate::utils::{
config::{OutputMode::*, PlayoutConfig},
errors::{ProcessError, ServiceError},
};
use crate::ARGS;
use crate::{
db::{handles, models::Channel},
utils::logging::Target,
};
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// Defined process units.
#[derive(Clone, Debug, Default, Copy, Eq, Serialize, Deserialize, PartialEq)]
pub enum ProcessUnit {
#[default]
Decoder,
Encoder,
Ingest,
}
impl fmt::Display for ProcessUnit {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ProcessUnit::Decoder => write!(f, "Decoder"),
ProcessUnit::Encoder => write!(f, "Encoder"),
ProcessUnit::Ingest => write!(f, "Ingest"),
}
}
}
use ProcessUnit::*;
#[derive(Clone, Debug, Default)]
pub struct ChannelManager {
pub db_pool: Option<Pool<Sqlite>>,
pub config: Arc<Mutex<PlayoutConfig>>,
pub channel: Arc<Mutex<Channel>>,
pub decoder: Arc<Mutex<Option<Child>>>,
pub encoder: Arc<Mutex<Option<Child>>>,
pub ingest: Arc<Mutex<Option<Child>>>,
pub ingest_is_running: Arc<AtomicBool>,
pub is_terminated: Arc<AtomicBool>,
pub is_alive: Arc<AtomicBool>,
pub is_processing: Arc<AtomicBool>,
pub filter_chain: Option<Arc<Mutex<Vec<String>>>>,
pub current_date: Arc<Mutex<String>>,
pub list_init: Arc<AtomicBool>,
pub current_media: Arc<Mutex<Option<Media>>>,
pub current_list: Arc<Mutex<Vec<Media>>>,
pub filler_list: Arc<Mutex<Vec<Media>>>,
pub current_index: Arc<AtomicUsize>,
pub filler_index: Arc<AtomicUsize>,
pub run_count: Arc<AtomicUsize>,
}
impl ChannelManager {
pub fn new(db_pool: Option<Pool<Sqlite>>, channel: Channel, config: PlayoutConfig) -> Self {
Self {
db_pool,
is_alive: Arc::new(AtomicBool::new(false)),
channel: Arc::new(Mutex::new(channel)),
config: Arc::new(Mutex::new(config)),
list_init: Arc::new(AtomicBool::new(true)),
current_media: Arc::new(Mutex::new(None)),
current_list: Arc::new(Mutex::new(vec![Media::new(0, "", false)])),
filler_list: Arc::new(Mutex::new(vec![])),
current_index: Arc::new(AtomicUsize::new(0)),
filler_index: Arc::new(AtomicUsize::new(0)),
run_count: Arc::new(AtomicUsize::new(0)),
..Default::default()
}
}
pub fn update_channel(self, other: &Channel) {
let mut channel = self.channel.lock().unwrap();
channel.name.clone_from(&other.name);
channel.preview_url.clone_from(&other.preview_url);
channel.extra_extensions.clone_from(&other.extra_extensions);
channel.active.clone_from(&other.active);
channel.last_date.clone_from(&other.last_date);
channel.time_shift.clone_from(&other.time_shift);
channel.utc_offset.clone_from(&other.utc_offset);
}
pub fn update_config(&self, new_config: PlayoutConfig) {
let mut config = self.config.lock().unwrap();
*config = new_config;
}
pub async fn async_start(&self) {
if !self.is_alive.load(Ordering::SeqCst) {
self.run_count.fetch_add(1, Ordering::SeqCst);
self.is_alive.store(true, Ordering::SeqCst);
self.is_terminated.store(false, Ordering::SeqCst);
self.list_init.store(true, Ordering::SeqCst);
let pool_clone = self.db_pool.clone().unwrap();
let self_clone = self.clone();
let channel_id = self.channel.lock().unwrap().id;
if let Err(e) = handles::update_player(&pool_clone, channel_id, true).await {
                error!(target: Target::all(), channel = channel_id; "Unable to write player status: {e}");
};
thread::spawn(move || {
let mut run_endless = true;
while run_endless {
let run_count = self_clone.run_count.clone();
if let Err(e) = start_channel(self_clone.clone()) {
run_count.fetch_sub(1, Ordering::SeqCst);
error!("{e}");
};
let active = self_clone.channel.lock().unwrap().active;
if !active {
run_endless = false;
} else {
self_clone.run_count.fetch_add(1, Ordering::SeqCst);
self_clone.is_alive.store(true, Ordering::SeqCst);
self_clone.is_terminated.store(false, Ordering::SeqCst);
self_clone.list_init.store(true, Ordering::SeqCst);
thread::sleep(Duration::from_millis(250));
}
}
trace!("Async start done");
});
}
}
pub async fn foreground_start(&self, index: usize) {
if !self.is_alive.load(Ordering::SeqCst) {
self.run_count.fetch_add(1, Ordering::SeqCst);
self.is_alive.store(true, Ordering::SeqCst);
self.is_terminated.store(false, Ordering::SeqCst);
self.list_init.store(true, Ordering::SeqCst);
let pool_clone = self.db_pool.clone().unwrap();
let self_clone = self.clone();
let channel_id = self.channel.lock().unwrap().id;
if let Err(e) = handles::update_player(&pool_clone, channel_id, true).await {
                error!(target: Target::all(), channel = channel_id; "Unable to write player status: {e}");
};
if index + 1 == ARGS.channels.clone().unwrap_or_default().len() {
let run_count = self_clone.run_count.clone();
tokio::task::spawn_blocking(move || {
if let Err(e) = start_channel(self_clone) {
run_count.fetch_sub(1, Ordering::SeqCst);
error!("{e}");
}
})
.await
.unwrap();
} else {
thread::spawn(move || {
let run_count = self_clone.run_count.clone();
if let Err(e) = start_channel(self_clone) {
run_count.fetch_sub(1, Ordering::SeqCst);
error!("{e}");
};
});
}
}
}
pub fn stop(&self, unit: ProcessUnit) -> Result<(), ProcessError> {
match unit {
Decoder => {
if let Some(proc) = self.decoder.lock()?.as_mut() {
proc.kill()
.map_err(|e| ProcessError::Custom(format!("Decoder: {e}")))?;
}
}
Encoder => {
if let Some(proc) = self.encoder.lock()?.as_mut() {
proc.kill()
.map_err(|e| ProcessError::Custom(format!("Encoder: {e}")))?;
}
}
Ingest => {
if let Some(proc) = self.ingest.lock()?.as_mut() {
proc.kill()
.map_err(|e| ProcessError::Custom(format!("Ingest: {e}")))?;
}
}
}
self.wait(unit)?;
Ok(())
}
fn run_wait(
&self,
unit: ProcessUnit,
child: &Arc<Mutex<Option<Child>>>,
) -> Result<(), ProcessError> {
if let Some(proc) = child.lock().unwrap().as_mut() {
loop {
match proc.try_wait() {
Ok(Some(_)) => break,
Ok(None) => thread::sleep(Duration::from_millis(10)),
Err(e) => return Err(ProcessError::Custom(format!("{unit}: {e}"))),
}
}
}
Ok(())
}
    /// Wait for the process to close properly.
    /// This prevents orphaned/zombie processes in the system.
pub fn wait(&self, unit: ProcessUnit) -> Result<(), ProcessError> {
match unit {
Decoder => self.run_wait(unit, &self.decoder)?,
Encoder => self.run_wait(unit, &self.encoder)?,
Ingest => self.run_wait(unit, &self.ingest)?,
}
thread::sleep(Duration::from_millis(50));
Ok(())
}
pub async fn async_stop(&self) -> Result<(), ServiceError> {
let channel_id = self.channel.lock().unwrap().id;
if self.is_alive.load(Ordering::SeqCst) {
debug!(target: Target::all(), channel = channel_id; "Deactivate playout and stop all child processes from channel: <yellow>{channel_id}</>");
}
self.is_terminated.store(true, Ordering::SeqCst);
self.is_alive.store(false, Ordering::SeqCst);
self.ingest_is_running.store(false, Ordering::SeqCst);
self.run_count.fetch_sub(1, Ordering::SeqCst);
let pool = self.db_pool.clone().unwrap();
if let Err(e) = handles::update_player(&pool, channel_id, false).await {
            error!(target: Target::all(), channel = channel_id; "Unable to write player status: {e}");
};
for unit in [Decoder, Encoder, Ingest] {
let self_clone = self.clone();
if let Err(e) = web::block(move || self_clone.stop(unit)).await? {
if !e.to_string().contains("exited process") {
error!(target: Target::all(), channel = channel_id; "{e}")
}
}
}
Ok(())
}
/// No matter what is running, terminate them all.
pub fn stop_all(&self) {
let channel_id = self.channel.lock().unwrap().id;
if self.is_alive.load(Ordering::SeqCst) {
debug!(target: Target::all(), channel = channel_id; "Stop all child processes from channel: <yellow>{channel_id}</>");
}
self.is_terminated.store(true, Ordering::SeqCst);
self.is_alive.store(false, Ordering::SeqCst);
self.ingest_is_running.store(false, Ordering::SeqCst);
self.run_count.fetch_sub(1, Ordering::SeqCst);
for unit in [Decoder, Encoder, Ingest] {
if let Err(e) = self.stop(unit) {
if !e.to_string().contains("exited process") {
error!(target: Target::all(), channel = channel_id; "{e}")
}
}
}
}
}
#[derive(Clone, Debug, Default)]
pub struct ChannelController {
pub channels: Vec<ChannelManager>,
}
impl ChannelController {
pub fn new() -> Self {
Self { channels: vec![] }
}
pub fn add(&mut self, manager: ChannelManager) {
self.channels.push(manager);
}
pub fn get(&self, id: i32) -> Option<ChannelManager> {
for manager in self.channels.iter() {
if manager.channel.lock().unwrap().id == id {
return Some(manager.clone());
}
}
None
}
pub fn remove(&mut self, channel_id: i32) {
self.channels.retain(|manager| {
let channel = manager.channel.lock().unwrap();
channel.id != channel_id
});
}
pub fn run_count(&self) -> usize {
self.channels
.iter()
.filter(|manager| manager.is_alive.load(Ordering::SeqCst))
.count()
}
}
pub fn start_channel(manager: ChannelManager) -> Result<(), ProcessError> {
let config = manager.config.lock()?.clone();
let mode = config.output.mode.clone();
let filler_list = manager.filler_list.clone();
let channel_id = config.general.channel_id;
drain_hls_path(&config.channel.public)?;
debug!(target: Target::all(), channel = channel_id; "Start ffplayout v{VERSION}, channel: <yellow>{channel_id}</>");
// Fill the filler list; the filler can also be a single file.
thread::spawn(move || {
fill_filler_list(&config, Some(filler_list));
});
match mode {
// write files/playlist to HLS m3u8 playlist
HLS => write_hls(manager),
// play on desktop or stream to a remote target
_ => player(manager),
}
}
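/// Remove leftover HLS segments from the public path that are no longer
/// referenced by any m3u8 playlist found there.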
pub fn drain_hls_path(path: &Path) -> io::Result<()> {
let m3u8_files = find_m3u8_files(path)?;
let mut pl_segments = vec![];
for file in m3u8_files {
let mut file = std::fs::File::open(file).unwrap();
let mut bytes: Vec<u8> = Vec::new();
file.read_to_end(&mut bytes).unwrap();
if let Ok(Playlist::MediaPlaylist(pl)) = m3u8_rs::parse_playlist_res(&bytes) {
for segment in pl.segments {
pl_segments.push(segment.uri);
}
};
}
delete_old_segments(path, &pl_segments)
}
/// Recursively searches for all files with the .m3u8 extension in the specified path.
fn find_m3u8_files(path: &Path) -> io::Result<Vec<String>> {
let mut m3u8_files = Vec::new();
for entry in WalkDir::new(path)
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| e.path().is_file() && e.path().extension().map_or(false, |ext| ext == "m3u8"))
{
m3u8_files.push(entry.path().to_string_lossy().to_string());
}
Ok(m3u8_files)
}
/// Check if a segment is still referenced in a playlist; if not, delete it.
fn delete_old_segments<P: AsRef<Path> + Clone + std::fmt::Debug>(
path: P,
pl_segments: &[String],
) -> io::Result<()> {
for entry in WalkDir::new(path)
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| {
e.path().is_file()
&& e.path()
.extension()
.map_or(false, |ext| ext == "ts" || ext == "vtt")
})
{
let filename = entry.file_name().to_string_lossy().to_string();
if !pl_segments.contains(&filename) {
fs::remove_file(entry.path())?;
}
}
Ok(())
}


@ -1,184 +0,0 @@
use std::{
io::{BufRead, BufReader, Read},
process::{ChildStderr, Command, Stdio},
sync::{atomic::Ordering, mpsc::SyncSender},
thread,
};
use log::*;
use crate::utils::{
config::{PlayoutConfig, FFMPEG_IGNORE_ERRORS, FFMPEG_UNRECOVERABLE_ERRORS},
logging::{log_line, Target},
};
use crate::vec_strings;
use crate::{
player::{
controller::{ChannelManager, ProcessUnit::*},
utils::{is_free_tcp_port, valid_stream, Media},
},
utils::errors::ProcessError,
};
fn server_monitor(
id: i32,
level: &str,
ignore: Vec<String>,
buffer: BufReader<ChildStderr>,
channel_mgr: ChannelManager,
) -> Result<(), ProcessError> {
for line in buffer.lines() {
let line = line?;
if !FFMPEG_IGNORE_ERRORS.iter().any(|i| line.contains(*i))
&& !ignore.iter().any(|i| line.contains(i))
{
log_line(&line, level);
}
if line.contains("rtmp") && line.contains("Unexpected stream") && !valid_stream(&line) {
warn!(target: Target::file_mail(), channel = id; "Unexpected ingest stream: {line}");
if let Err(e) = channel_mgr.stop(Ingest) {
error!(target: Target::file_mail(), channel = id; "{e}");
};
}
if FFMPEG_UNRECOVERABLE_ERRORS
.iter()
.any(|i| line.contains(*i))
{
error!(target: Target::file_mail(), channel = id; "Hit unrecoverable error!");
channel_mgr.channel.lock().unwrap().active = false;
channel_mgr.stop_all();
}
}
Ok(())
}
/// ffmpeg Ingest Server
///
/// Start ffmpeg in listen mode, and wait for input.
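/// The assembled command has roughly this shape (URL and flags here are
/// hypothetical; the real input comes from `ingest.input_cmd`):
/// ```text
/// ffmpeg -hide_banner -nostats -v level+info \
///     -listen 1 -i rtmp://127.0.0.1:1936/live/stream \
///     <filter cmd/map> <processing cmd>
/// ```
/// Its stdout is read below in fixed 65088-byte chunks and forwarded through
/// `ingest_sender` to the player loop.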
pub fn ingest_server(
config: PlayoutConfig,
ingest_sender: SyncSender<(usize, [u8; 65088])>,
channel_mgr: ChannelManager,
) -> Result<(), ProcessError> {
let id = config.general.channel_id;
let mut buffer: [u8; 65088] = [0; 65088];
let mut server_cmd = vec_strings!["-hide_banner", "-nostats", "-v", "level+info"];
let stream_input = config.ingest.input_cmd.clone().unwrap();
let mut dummy_media = Media::new(0, "Live Stream", false);
dummy_media.unit = Ingest;
dummy_media.add_filter(&config, &None);
let is_terminated = channel_mgr.is_terminated.clone();
let ingest_is_running = channel_mgr.ingest_is_running.clone();
let vtt_dummy = config
.channel
.storage
.join(config.processing.vtt_dummy.clone().unwrap_or_default());
if let Some(ingest_input_cmd) = config.advanced.ingest.input_cmd {
server_cmd.append(&mut ingest_input_cmd.clone());
}
server_cmd.append(&mut stream_input.clone());
if config.processing.vtt_enable && vtt_dummy.is_file() {
server_cmd.append(&mut vec_strings!["-i", vtt_dummy.to_string_lossy()]);
}
if let Some(mut filter) = dummy_media.filter {
server_cmd.append(&mut filter.cmd());
server_cmd.append(&mut filter.map());
}
if config.processing.vtt_enable && vtt_dummy.is_file() {
server_cmd.append(&mut vec_strings!("-map", "1:s"));
}
if let Some(mut cmd) = config.processing.cmd {
server_cmd.append(&mut cmd);
}
let mut is_running;
debug!(target: Target::file_mail(), channel = id;
"Server CMD: <bright-blue>\"ffmpeg {}\"</>",
server_cmd.join(" ")
);
if let Some(url) = stream_input.iter().find(|s| s.contains("://")) {
if !is_free_tcp_port(id, url) {
channel_mgr.channel.lock().unwrap().active = false;
channel_mgr.stop_all();
} else {
info!(target: Target::file_mail(), channel = id; "Start ingest server, listening on: <b><magenta>{url}</></b>");
}
};
while !is_terminated.load(Ordering::SeqCst) {
let proc_ctl = channel_mgr.clone();
let level = config.logging.ingest_level.clone();
let ignore = config.logging.ignore_lines.clone();
let mut server_proc = match Command::new("ffmpeg")
.args(server_cmd.clone())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
{
Err(e) => {
error!(target: Target::file_mail(), channel = id; "couldn't spawn ingest server: {e}");
panic!("couldn't spawn ingest server: {e}")
}
Ok(proc) => proc,
};
let mut ingest_reader = BufReader::new(server_proc.stdout.take().unwrap());
let server_err = BufReader::new(server_proc.stderr.take().unwrap());
let error_reader_thread =
thread::spawn(move || server_monitor(id, &level, ignore, server_err, proc_ctl));
*channel_mgr.ingest.lock().unwrap() = Some(server_proc);
is_running = false;
loop {
let bytes_len = match ingest_reader.read(&mut buffer[..]) {
Ok(length) => length,
Err(e) => {
debug!(target: Target::file_mail(), channel = id; "Ingest server read {e:?}");
break;
}
};
if !is_running {
ingest_is_running.store(true, Ordering::SeqCst);
is_running = true;
}
if bytes_len > 0 {
if let Err(e) = ingest_sender.send((bytes_len, buffer)) {
error!(target: Target::file_mail(), channel = id; "Ingest server write error: {e:?}");
is_terminated.store(true, Ordering::SeqCst);
break;
}
} else {
break;
}
}
drop(ingest_reader);
ingest_is_running.store(false, Ordering::SeqCst);
if let Err(e) = channel_mgr.wait(Ingest) {
error!(target: Target::file_mail(), channel = id; "{e}")
}
if let Err(e) = error_reader_thread.join() {
error!(target: Target::file_mail(), channel = id; "{e:?}");
};
}
Ok(())
}


@ -1,50 +0,0 @@
use std::thread;
use log::*;
pub mod folder;
pub mod ingest;
pub mod playlist;
pub use folder::watchman;
pub use ingest::ingest_server;
pub use playlist::CurrentProgram;
use crate::player::{
controller::ChannelManager,
utils::{folder::FolderSource, Media},
};
use crate::utils::{config::ProcessMode::*, logging::Target};
/// Create a source iterator from playlist, or from folder.
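/// Usage sketch (hypothetical caller):
/// ```ignore
/// for node in source_generator(manager.clone()) {
///     // each `Media` node carries source path, seek/out values and filters
///     println!("next clip: {}", node.source);
/// }
/// ```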
pub fn source_generator(manager: ChannelManager) -> Box<dyn Iterator<Item = Media>> {
let config = manager.config.lock().unwrap().clone();
let id = config.general.channel_id;
let is_terminated = manager.is_terminated.clone();
let current_list = manager.current_list.clone();
match config.processing.mode {
Folder => {
info!(target: Target::file_mail(), channel = id; "Playout in folder mode");
debug!(target: Target::file_mail(), channel = id;
"Monitor folder: <b><magenta>{:?}</></b>",
config.channel.storage
);
let config_clone = config.clone();
let folder_source = FolderSource::new(&config, manager);
let list_clone = current_list.clone();
// Spawn a thread to monitor folder for file changes.
thread::spawn(move || watchman(config_clone, is_terminated.clone(), list_clone));
Box::new(folder_source) as Box<dyn Iterator<Item = Media>>
}
Playlist => {
info!(target: Target::file_mail(), channel = id; "Playout in playlist mode");
let program = CurrentProgram::new(manager);
Box::new(program) as Box<dyn Iterator<Item = Media>>
}
}
}


@ -1,844 +0,0 @@
use std::{
path::Path,
sync::{
atomic::{AtomicBool, Ordering},
Arc, Mutex,
},
};
use log::*;
use crate::db::handles;
use crate::player::{
controller::ChannelManager,
utils::{
gen_dummy, get_delta, is_close, is_remote,
json_serializer::{read_json, set_defaults},
loop_filler, loop_image, modified_time, seek_and_length, time_in_seconds, JsonPlaylist,
Media, MediaProbe,
},
};
use crate::utils::{
config::{PlayoutConfig, IMAGE_FORMAT},
logging::Target,
};
/// Struct for the current playlist.
///
/// Here we prepare the init clip and build an iterator from which we pull our clips.
#[derive(Debug)]
pub struct CurrentProgram {
id: i32,
config: PlayoutConfig,
manager: ChannelManager,
start_sec: f64,
end_sec: f64,
json_playlist: JsonPlaylist,
current_node: Media,
is_terminated: Arc<AtomicBool>,
last_json_path: Option<String>,
last_node_ad: bool,
}
/// Prepare a playlist iterator.
impl CurrentProgram {
pub fn new(manager: ChannelManager) -> Self {
let config = manager.config.lock().unwrap().clone();
let is_terminated = manager.is_terminated.clone();
Self {
id: config.general.channel_id,
config: config.clone(),
manager,
start_sec: config.playlist.start_sec.unwrap(),
end_sec: config.playlist.length_sec.unwrap(),
json_playlist: JsonPlaylist::new(
"1970-01-01".to_string(),
config.playlist.start_sec.unwrap(),
),
current_node: Media::new(0, "", false),
is_terminated,
last_json_path: None,
last_node_ad: false,
}
}
// Check if there is no current playlist or the file got updated;
// if so, load/reload it.
fn load_or_update_playlist(&mut self, seek: bool) {
let mut get_current = false;
let mut reload = false;
if let Some(path) = self.json_playlist.path.clone() {
if (Path::new(&path).is_file() || is_remote(&path))
&& self.json_playlist.modified != modified_time(&path)
{
info!(target: Target::file_mail(), channel = self.id; "Reload playlist <b><magenta>{path}</></b>");
self.manager.list_init.store(true, Ordering::SeqCst);
get_current = true;
reload = true;
}
} else {
get_current = true;
}
if get_current {
self.json_playlist = read_json(
&mut self.config,
self.manager.current_list.clone(),
self.json_playlist.path.clone(),
self.is_terminated.clone(),
seek,
false,
);
if !reload {
if let Some(file) = &self.json_playlist.path {
info!(target: Target::file_mail(), channel = self.id; "Read playlist: <b><magenta>{file}</></b>");
}
if *self
.manager
.channel
.lock()
.unwrap()
.last_date
.clone()
.unwrap_or_default()
!= self.json_playlist.date
{
self.set_status(self.json_playlist.date.clone());
}
self.manager
.current_date
.lock()
.unwrap()
.clone_from(&self.json_playlist.date);
}
self.manager
.current_list
.lock()
.unwrap()
.clone_from(&self.json_playlist.program);
if self.json_playlist.path.is_none() {
trace!("missing playlist");
self.current_node = Media::new(0, "", false);
self.manager.list_init.store(true, Ordering::SeqCst);
self.manager.current_index.store(0, Ordering::SeqCst);
}
}
}
// Check if the day is over and it is time for a new playlist.
fn check_for_playlist(&mut self, seek: bool) -> bool {
let (delta, total_delta) = get_delta(&self.config, &time_in_seconds());
let mut next = false;
let duration = if self.current_node.duration >= self.current_node.out {
self.current_node.duration
} else {
// out may be longer than duration, to allow looping
self.current_node.out
};
let node_index = self.current_node.index.unwrap_or_default();
let mut next_start =
self.current_node.begin.unwrap_or_default() - self.start_sec + duration + delta;
if node_index > 0 && node_index == self.manager.current_list.lock().unwrap().len() - 1 {
next_start += self.config.general.stop_threshold;
}
trace!(
"delta: {delta} | total_delta: {total_delta}, index: {node_index} \n next_start: {next_start} | end_sec: {} | source {}",
self.end_sec,
self.current_node.source
);
// Check if we are over the target length or close to it; if so, load the next playlist.
if !self.config.playlist.infinit
&& (next_start >= self.end_sec
|| is_close(total_delta, 0.0, 2.0)
|| is_close(total_delta, self.end_sec, 2.0))
{
trace!("get next day");
next = true;
self.json_playlist = read_json(
&mut self.config,
self.manager.current_list.clone(),
None,
self.is_terminated.clone(),
false,
true,
);
if let Some(file) = &self.json_playlist.path {
info!(target: Target::file_mail(), channel = self.id; "Read next playlist: <b><magenta>{file}</></b>");
}
self.manager.list_init.store(false, Ordering::SeqCst);
self.set_status(self.json_playlist.date.clone());
self.manager
.current_list
.lock()
.unwrap()
.clone_from(&self.json_playlist.program);
self.manager.current_index.store(0, Ordering::SeqCst);
} else {
self.load_or_update_playlist(seek)
}
next
}
fn set_status(&mut self, date: String) {
if self.manager.channel.lock().unwrap().last_date != Some(date.clone())
&& self.manager.channel.lock().unwrap().time_shift != 0.0
{
info!(target: Target::file_mail(), channel = self.id; "Reset playout status");
}
self.manager.current_date.lock().unwrap().clone_from(&date);
self.manager
.channel
.lock()
.unwrap()
.last_date
.clone_from(&Some(date.clone()));
self.manager.channel.lock().unwrap().time_shift = 0.0;
let db_pool = self.manager.db_pool.clone().unwrap();
if let Err(e) = tokio::runtime::Runtime::new()
.unwrap()
.block_on(handles::update_stat(
&db_pool,
self.config.general.channel_id,
date,
0.0,
))
{
error!(target: Target::file_mail(), channel = self.id; "Unable to write status: {e}");
};
}
// Check if the last and/or next clip is an advertisement.
fn last_next_ad(&mut self, node: &mut Media) {
let index = self.manager.current_index.load(Ordering::SeqCst);
let current_list = self.manager.current_list.lock().unwrap();
if index + 1 < current_list.len() && &current_list[index + 1].category == "advertisement" {
node.next_ad = true;
}
if index > 0
&& index < current_list.len()
&& &current_list[index - 1].category == "advertisement"
{
node.last_ad = true;
}
}
// Get the current time; when we are before the start time,
// add a full day in seconds to it.
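// Example: with start_sec = 21600.0 (06:00) and a wall clock of 3600.0
// (01:00), we return 3600.0 + 86400.0 = 90000.0, so lookups still land
// inside the playout day that started the previous morning.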
fn get_current_time(&mut self) -> f64 {
let mut time_sec = time_in_seconds();
if time_sec < self.start_sec {
time_sec += 86400.0 // self.config.playlist.length_sec.unwrap();
}
time_sec
}
// On init or reload we need to seek to the current clip.
fn get_current_clip(&mut self) {
let mut time_sec = self.get_current_time();
let shift = self.manager.channel.lock().unwrap().time_shift;
if shift != 0.0 {
info!(target: Target::file_mail(), channel = self.id; "Shift playlist start for <yellow>{shift:.3}</> seconds");
time_sec += shift;
}
if self.config.playlist.infinit
&& self.json_playlist.length.unwrap() < 86400.0
&& time_sec > self.json_playlist.length.unwrap() + self.start_sec
{
self.recalculate_begin(true)
}
for (i, item) in self.manager.current_list.lock().unwrap().iter().enumerate() {
if item.begin.unwrap() + item.out - item.seek > time_sec {
self.manager.list_init.store(false, Ordering::SeqCst);
self.manager.current_index.store(i, Ordering::SeqCst);
break;
}
}
}
// Prepare init clip.
fn init_clip(&mut self) -> bool {
trace!("init_clip");
self.get_current_clip();
let mut is_filler = false;
if !self.manager.list_init.load(Ordering::SeqCst) {
let time_sec = self.get_current_time();
let index = self.manager.current_index.load(Ordering::SeqCst);
let nodes = self.manager.current_list.lock().unwrap();
let last_index = nodes.len() - 1;
// clone the node to preserve the original values in the list
let mut node_clone = nodes[index].clone();
// Important! Without a manual drop here, the lock would still be held in handle_list_init.
drop(nodes);
trace!("Clip from init: {}", node_clone.source);
node_clone.seek += time_sec
- (node_clone.begin.unwrap() - self.manager.channel.lock().unwrap().time_shift);
self.last_next_ad(&mut node_clone);
self.manager.current_index.fetch_add(1, Ordering::SeqCst);
self.current_node =
handle_list_init(&self.config, node_clone, &self.manager, last_index);
if self
.current_node
.source
.contains(&self.config.channel.storage.to_string_lossy().to_string())
|| self.current_node.source.contains("color=c=#121212")
{
is_filler = true;
}
}
is_filler
}
fn fill_end(&mut self, total_delta: f64) {
// Fill end from playlist
let index = self.manager.current_index.load(Ordering::SeqCst);
let mut media = Media::new(index, "", false);
media.begin = Some(time_in_seconds());
media.duration = total_delta;
media.out = total_delta;
self.last_next_ad(&mut media);
self.current_node = gen_source(&self.config, media, &self.manager, 0);
self.manager
.current_list
.lock()
.unwrap()
.push(self.current_node.clone());
self.current_node.last_ad = self.last_node_ad;
self.current_node
.add_filter(&self.config, &self.manager.filter_chain);
self.manager.current_index.fetch_add(1, Ordering::SeqCst);
}
fn recalculate_begin(&mut self, extend: bool) {
debug!(target: Target::file_mail(), channel = self.id; "Infinit playlist reaches end, recalculate clip begins. Extend: <yellow>{extend}</>");
let mut time_sec = time_in_seconds();
if extend {
// Calculate the elapsed time since the playlist start
let elapsed_sec = if time_sec >= self.start_sec {
time_sec - self.start_sec
} else {
time_sec + 86400.0 - self.start_sec
};
// Time passed within the current playlist loop
let time_in_current_loop = elapsed_sec % self.json_playlist.length.unwrap();
// Adjust the start time so that the playlist starts at the correct point in time
time_sec -= time_in_current_loop;
}
self.json_playlist.start_sec = Some(time_sec);
set_defaults(&mut self.json_playlist);
self.manager
.current_list
.lock()
.unwrap()
.clone_from(&self.json_playlist.program);
}
}
/// Build the playlist iterator
impl Iterator for CurrentProgram {
type Item = Media;
fn next(&mut self) -> Option<Self::Item> {
self.last_json_path.clone_from(&self.json_playlist.path);
self.last_node_ad = self.current_node.last_ad;
self.check_for_playlist(self.manager.list_init.load(Ordering::SeqCst));
if self.manager.list_init.load(Ordering::SeqCst) {
trace!("Init playlist, from next iterator");
let mut init_clip_is_filler = false;
if self.json_playlist.path.is_some() {
init_clip_is_filler = self.init_clip();
}
if self.manager.list_init.load(Ordering::SeqCst) && !init_clip_is_filler {
// On init load the playlist may not be long enough, or clips may be missing,
// so we fill the gap with a dummy.
trace!("Init clip is no filler");
let mut current_time = time_in_seconds();
let (_, total_delta) = get_delta(&self.config, &current_time);
if self.start_sec > current_time {
current_time += self.end_sec + 1.0;
}
let mut last_index = 0;
let length = self.manager.current_list.lock().unwrap().len();
if length > 0 {
last_index = length - 1;
}
let mut media = Media::new(length, "", false);
media.begin = Some(current_time);
media.duration = total_delta;
media.out = total_delta;
self.last_next_ad(&mut media);
self.current_node = gen_source(&self.config, media, &self.manager, last_index);
}
return Some(self.current_node.clone());
}
if self.manager.current_index.load(Ordering::SeqCst)
< self.manager.current_list.lock().unwrap().len()
{
// get next clip from current playlist
let mut is_last = false;
let index = self.manager.current_index.load(Ordering::SeqCst);
let node_list = self.manager.current_list.lock().unwrap();
let mut node = node_list[index].clone();
let last_index = node_list.len() - 1;
drop(node_list);
if index == last_index {
is_last = true
}
self.last_next_ad(&mut node);
self.current_node =
timed_source(node, &self.config, is_last, &self.manager, last_index);
self.manager.current_index.fetch_add(1, Ordering::SeqCst);
Some(self.current_node.clone())
} else {
let (_, total_delta) = get_delta(&self.config, &self.start_sec);
if !self.config.playlist.infinit
&& self.last_json_path == self.json_playlist.path
&& total_delta.abs() > 1.0
{
// Playlist finished too early,
// so we have to fill the rest with a placeholder.
trace!("Total delta on list end: {total_delta}");
self.fill_end(total_delta);
return Some(self.current_node.clone());
}
// Get first clip from next playlist.
let c_list = self.manager.current_list.lock().unwrap();
let mut first_node = c_list[0].clone();
drop(c_list);
if self.config.playlist.infinit {
self.recalculate_begin(false)
}
self.manager.current_index.store(0, Ordering::SeqCst);
self.last_next_ad(&mut first_node);
first_node.last_ad = self.last_node_ad;
self.current_node = gen_source(&self.config, first_node, &self.manager, 0);
self.manager.current_index.store(1, Ordering::SeqCst);
Some(self.current_node.clone())
}
}
}
/// Prepare the input clip:
///
/// - check begin and length of the clip
/// - return the clip only if we are within the 24-hour time range
fn timed_source(
node: Media,
config: &PlayoutConfig,
last: bool,
manager: &ChannelManager,
last_index: usize,
) -> Media {
let id = config.general.channel_id;
let time_shift = manager.channel.lock().unwrap().time_shift;
let current_date = manager.current_date.lock().unwrap().clone();
let last_date = manager.channel.lock().unwrap().last_date.clone();
let (delta, total_delta) = get_delta(config, &node.begin.unwrap());
let mut shifted_delta = delta;
let mut new_node = node.clone();
new_node.process = Some(false);
trace!(
"Node - begin: {} | source: {}",
node.begin.unwrap(),
node.source
);
trace!(
"timed source is last: {last} | current_date: {current_date} | last_date: {last_date:?} | time_shift: {time_shift}"
);
if config.playlist.length.contains(':') {
if Some(current_date) == last_date && time_shift != 0.0 {
shifted_delta = delta - time_shift;
debug!(target: Target::file_mail(), channel = id; "Delta: <yellow>{shifted_delta:.3}</>, shifted: <yellow>{delta:.3}</>");
} else {
debug!(target: Target::file_mail(), channel = id; "Delta: <yellow>{shifted_delta:.3}</>");
}
if config.general.stop_threshold > 0.0
&& shifted_delta.abs() > config.general.stop_threshold
{
if manager.is_alive.load(Ordering::SeqCst) {
error!(target: Target::file_mail(), channel = id; "Clip begin out of sync for <yellow>{delta:.3}</> seconds.");
}
new_node.cmd = None;
return new_node;
}
}
if (total_delta > node.out - node.seek && !last)
|| node.index.unwrap() < 2
|| !config.playlist.length.contains(':')
|| config.playlist.infinit
{
// when we are in the 24 hour range, get the clip
new_node.process = Some(true);
new_node = gen_source(config, node, manager, last_index);
} else if total_delta <= 0.0 {
info!(target: Target::file_mail(), channel = id; "Begin is over play time, skip: {}", node.source);
} else if total_delta < node.duration - node.seek || last {
new_node = handle_list_end(config, node, total_delta, manager, last_index);
}
new_node
}
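/// A clip that both seeks into the file and loops past its end cannot be
/// expressed as a single input; split it into two list entries, one that
/// seeks and a following duplicate that loops the remainder.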
fn duplicate_for_seek_and_loop(node: &mut Media, current_list: &Arc<Mutex<Vec<Media>>>) {
let mut nodes = current_list.lock().unwrap();
let index = node.index.unwrap_or_default();
let mut node_duplicate = node.clone();
node_duplicate.seek = 0.0;
let orig_seek = node.seek;
node.out = node.duration;
if node.seek > node.duration {
node.seek %= node.duration;
node_duplicate.out = node_duplicate.out - orig_seek - (node.out - node.seek);
} else {
node_duplicate.out -= node_duplicate.duration;
}
if node.seek == node.out {
node.seek = node_duplicate.seek;
node.out = node_duplicate.out;
} else if node_duplicate.out - node_duplicate.seek > 1.2 {
node_duplicate.begin =
Some(node_duplicate.begin.unwrap_or_default() + (node.out - node.seek));
nodes.insert(index + 1, node_duplicate);
for (i, item) in nodes.iter_mut().enumerate() {
item.index = Some(i);
}
}
}
/// Generate the source CMD, or when the clip does not exist, get a dummy.
pub fn gen_source(
config: &PlayoutConfig,
mut node: Media,
manager: &ChannelManager,
last_index: usize,
) -> Media {
let node_index = node.index.unwrap_or_default();
let mut duration = node.out - node.seek;
if duration < 1.0 {
warn!(
target: Target::file_mail(), channel = config.general.channel_id;
"Clip is less then 1 second long (<yellow>{duration:.3}</>), adjust length."
);
duration = 1.2;
if node.seek > 1.0 {
node.seek -= 1.2;
} else {
node.out = 1.2;
}
}
trace!("Clip new length: {duration}, duration: {}", node.duration);
if node.probe.is_none() && !node.source.is_empty() {
if let Err(e) = node.add_probe(true) {
trace!("{e:?}");
};
} else {
trace!("Node has a probe...")
}
// separate if condition, because node.add_probe() in the condition above may have set the probe
if node.probe.is_some() {
if node
.source
.rsplit_once('.')
.map(|(_, e)| e.to_lowercase())
.filter(|c| IMAGE_FORMAT.contains(&c.as_str()))
.is_some()
{
node.cmd = Some(loop_image(config, &node));
} else {
if node.seek > 0.0 && node.out > node.duration {
warn!(target: Target::file_mail(), channel = config.general.channel_id; "Clip loops and has seek value: duplicate clip to separate loop and seek.");
duplicate_for_seek_and_loop(&mut node, &manager.current_list);
}
node.cmd = Some(seek_and_length(config, &mut node));
}
} else {
trace!("clip index: {node_index} | last index: {last_index}");
// Last index is the index of the last item in the node list.
if node_index < last_index {
error!(target: Target::file_mail(), channel = config.general.channel_id; "Source not found: <b><magenta>{}</></b>", node.source);
}
let mut fillers = vec![];
match manager.filler_list.try_lock() {
Ok(list) => fillers = list.to_vec(),
Err(e) => {
error!(target: Target::file_mail(), channel = config.general.channel_id; "Lock filler list error: {e}")
}
}
// Set list_init to true, to stay in sync.
manager.list_init.store(true, Ordering::SeqCst);
if config.storage.filler_path.is_dir() && !fillers.is_empty() {
let index = manager.filler_index.fetch_add(1, Ordering::SeqCst);
let mut filler_media = fillers[index].clone();
trace!("take filler: {}", filler_media.source);
if index == fillers.len() - 1 {
// reset index for next round
manager.filler_index.store(0, Ordering::SeqCst)
}
if filler_media.probe.is_none() {
if let Err(e) = filler_media.add_probe(false) {
error!(target: Target::file_mail(), channel = config.general.channel_id; "{e:?}");
};
}
if filler_media.duration > duration {
filler_media.out = duration;
}
node.source = filler_media.source;
node.seek = 0.0;
node.out = filler_media.out;
node.duration = filler_media.duration;
node.cmd = Some(loop_filler(config, &node));
node.probe = filler_media.probe;
} else {
match MediaProbe::new(&config.storage.filler_path.to_string_lossy()) {
Ok(probe) => {
if config
.storage
.filler_path
.to_string_lossy()
.to_string()
.rsplit_once('.')
.map(|(_, e)| e.to_lowercase())
.filter(|c| IMAGE_FORMAT.contains(&c.as_str()))
.is_some()
{
node.source = config
.storage
.filler_path
.clone()
.to_string_lossy()
.to_string();
node.cmd = Some(loop_image(config, &node));
node.probe = Some(probe);
} else if let Some(filler_duration) = probe
.clone()
.format
.duration
.and_then(|d| d.parse::<f64>().ok())
{
// Create placeholder from config filler.
let mut filler_out = filler_duration;
if filler_duration > duration {
filler_out = duration;
}
node.source = config
.storage
.filler_path
.clone()
.to_string_lossy()
.to_string();
node.seek = 0.0;
node.out = filler_out;
node.duration = filler_duration;
node.cmd = Some(loop_filler(config, &node));
node.probe = Some(probe);
} else {
// Create colored placeholder.
let (source, cmd) = gen_dummy(config, duration);
node.source = source;
node.cmd = Some(cmd);
}
}
Err(e) => {
// Create colored placeholder.
error!(target: Target::file_mail(), channel = config.general.channel_id; "Filler error: {e}");
let mut dummy_duration = 60.0;
if dummy_duration > duration {
dummy_duration = duration;
}
let (source, cmd) = gen_dummy(config, dummy_duration);
node.seek = 0.0;
node.out = dummy_duration;
node.duration = dummy_duration;
node.source = source;
node.cmd = Some(cmd);
}
}
}
warn!(
target: Target::file_mail(), channel = config.general.channel_id;
"Generate filler with <yellow>{:.2}</> seconds length!",
node.out
);
}
node.add_filter(config, &manager.filter_chain.clone());
trace!(
"return gen_source: {}, seek: {}, out: {}",
node.source,
node.seek,
node.out,
);
node
}
/// Handle the init clip; it can also be the last one in the playlist,
/// so we have to detect that and calculate the right length.
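/// Example: if 120 s remain in the playout day but the clip would play for
/// 300 s (`out - seek`), `out` is trimmed to `seek + 120.0` (values
/// hypothetical).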
fn handle_list_init(
config: &PlayoutConfig,
mut node: Media,
manager: &ChannelManager,
last_index: usize,
) -> Media {
debug!(target: Target::file_mail(), channel = config.general.channel_id; "Playlist init");
let (_, total_delta) = get_delta(config, &node.begin.unwrap());
if !config.playlist.infinit && node.out - node.seek > total_delta {
node.out = total_delta + node.seek;
}
gen_source(config, node, manager, last_index)
}
/// When we reach the last clip in the playlist,
/// or the total playtime,
/// we end up here.
fn handle_list_end(
config: &PlayoutConfig,
mut node: Media,
total_delta: f64,
manager: &ChannelManager,
last_index: usize,
) -> Media {
debug!(target: Target::file_mail(), channel = config.general.channel_id; "Last clip from day");
let mut out = if node.seek > 0.0 {
node.seek + total_delta
} else {
if node.duration > total_delta {
warn!(target: Target::file_mail(), channel = config.general.channel_id; "Adjust clip duration to: <yellow>{total_delta:.2}</>");
}
total_delta
};
// out can't be longer than duration
if out > node.duration {
out = node.duration
}
if node.duration > total_delta && total_delta > 1.0 && node.duration - node.seek >= total_delta
{
node.out = out;
} else {
warn!(target: Target::file_mail(), channel = config.general.channel_id; "Playlist is not long enough: <yellow>{total_delta:.2}</> seconds needed");
}
node.process = Some(true);
gen_source(config, node, manager, last_index)
}


@ -1,319 +0,0 @@
/*
This module writes the compressed output directly to an HLS (m3u8) playlist,
without pre- and post-processing.
Example config:
out:
output_param: >-
...
-flags +cgop
-f hls
-hls_time 6
-hls_list_size 600
-hls_flags append_list+delete_segments+omit_endlist+program_date_time
-hls_segment_filename /var/www/html/live/stream-%d.ts /var/www/html/live/stream.m3u8
*/
use std::{
io::{BufRead, BufReader},
process::{Command, Stdio},
sync::atomic::Ordering,
thread::{self, sleep},
time::{Duration, SystemTime},
};
use log::*;
use crate::utils::{logging::log_line, task_runner};
use crate::vec_strings;
use crate::{
player::{
controller::{ChannelManager, ProcessUnit::*},
input::source_generator,
utils::{
get_delta, is_free_tcp_port, prepare_output_cmd, sec_to_time, stderr_reader,
valid_stream, Media,
},
},
utils::{errors::ProcessError, logging::Target},
};
/// Ingest Server for HLS
fn ingest_to_hls_server(manager: ChannelManager) -> Result<(), ProcessError> {
let config = manager.config.lock().unwrap();
let id = config.general.channel_id;
let playlist_init = manager.list_init.clone();
let chain = manager.filter_chain.clone();
let mut error_count = 0;
let mut server_prefix = vec_strings!["-hide_banner", "-nostats", "-v", "level+info"];
let stream_input = config.ingest.input_cmd.clone().unwrap();
let mut dummy_media = Media::new(0, "Live Stream", false);
dummy_media.unit = Ingest;
let is_terminated = manager.is_terminated.clone();
let ingest_is_running = manager.ingest_is_running.clone();
if let Some(ingest_input_cmd) = &config.advanced.ingest.input_cmd {
server_prefix.append(&mut ingest_input_cmd.clone());
}
server_prefix.append(&mut stream_input.clone());
if config.processing.vtt_enable {
let vtt_dummy = config
.channel
.storage
.join(config.processing.vtt_dummy.clone().unwrap_or_default());
if vtt_dummy.is_file() {
server_prefix.append(&mut vec_strings!["-i", vtt_dummy.to_string_lossy()]);
}
}
let mut is_running;
if let Some(url) = stream_input.iter().find(|s| s.contains("://")) {
if !is_free_tcp_port(id, url) {
manager.channel.lock().unwrap().active = false;
manager.stop_all();
} else {
info!(target: Target::file_mail(), channel = id; "Start ingest server, listening on: <b><magenta>{url}</></b>");
}
};
drop(config);
loop {
let config = manager.config.lock().unwrap().clone();
dummy_media.add_filter(&config, &chain);
let server_cmd = prepare_output_cmd(&config, server_prefix.clone(), &dummy_media.filter);
let timer = SystemTime::now();
debug!(target: Target::file_mail(), channel = id;
"Server CMD: <bright-blue>\"ffmpeg {}\"</>",
server_cmd.join(" ")
);
let proc_ctl = manager.clone();
let mut server_proc = match Command::new("ffmpeg")
.args(server_cmd.clone())
.stderr(Stdio::piped())
.spawn()
{
Err(e) => {
error!(target: Target::file_mail(), channel = id; "couldn't spawn ingest server: {e}");
panic!("couldn't spawn ingest server: {e}");
}
Ok(proc) => proc,
};
let server_err = BufReader::new(server_proc.stderr.take().unwrap());
*manager.ingest.lock().unwrap() = Some(server_proc);
is_running = false;
for line in server_err.lines() {
let line = line?;
if line.contains("rtmp") && line.contains("Unexpected stream") && !valid_stream(&line) {
warn!(target: Target::file_mail(), channel = id; "Unexpected ingest stream: {line}");
if let Err(e) = proc_ctl.stop(Ingest) {
error!(target: Target::file_mail(), channel = id; "{e}");
};
}
if !is_running && line.contains("Input #0") {
ingest_is_running.store(true, Ordering::SeqCst);
playlist_init.store(true, Ordering::SeqCst);
is_running = true;
info!(target: Target::file_mail(), channel = id; "Switch from {} to live ingest", config.processing.mode);
if let Err(e) = manager.stop(Decoder) {
error!(target: Target::file_mail(), channel = id; "{e}");
}
}
if ingest_is_running.load(Ordering::SeqCst) {
log_line(&line, &config.logging.ingest_level);
} else {
log_line(&line, &config.logging.ffmpeg_level);
}
}
if ingest_is_running.load(Ordering::SeqCst) {
info!(target: Target::file_mail(), channel = id; "Switch from live ingest to {}", config.processing.mode);
}
ingest_is_running.store(false, Ordering::SeqCst);
if let Err(e) = manager.wait(Ingest) {
error!(target: Target::file_mail(), channel = id; "{e}")
}
if is_terminated.load(Ordering::SeqCst) {
break;
}
if let Ok(elapsed) = timer.elapsed() {
if elapsed.as_millis() < 300 {
error_count += 1;
if error_count > 10 {
error!(target: Target::file_mail(), channel = id; "Reach fatal error count in ingest, terminate channel!");
manager.channel.lock().unwrap().active = false;
manager.stop_all();
break;
}
} else {
error_count = 0;
}
}
}
Ok(())
}
/// HLS Writer
///
/// Write directly to an HLS playlist with a single ffmpeg instance.
pub fn write_hls(manager: ChannelManager) -> Result<(), ProcessError> {
let config = manager.config.lock()?.clone();
let id = config.general.channel_id;
let current_media = manager.current_media.clone();
let is_terminated = manager.is_terminated.clone();
let ff_log_format = format!("level+{}", config.logging.ffmpeg_level.to_lowercase());
let channel_mgr_2 = manager.clone();
let ingest_is_running = manager.ingest_is_running.clone();
let get_source = source_generator(manager.clone());
// spawn a thread for the ffmpeg ingest server
if config.ingest.enable {
thread::spawn(move || ingest_to_hls_server(channel_mgr_2));
}
let mut error_count = 0;
for node in get_source {
*current_media.lock().unwrap() = Some(node.clone());
let ignore = config.logging.ignore_lines.clone();
let timer = SystemTime::now();
if is_terminated.load(Ordering::SeqCst) {
break;
}
let mut cmd = match &node.cmd {
Some(cmd) => cmd.clone(),
None => break,
};
if !node.process.unwrap() {
continue;
}
info!(target: Target::file_mail(), channel = id;
"Play for <yellow>{}</>: <b><magenta>{}</></b>",
sec_to_time(node.out - node.seek),
node.source
);
if config.task.enable {
if config.task.path.is_file() {
let channel_mgr_3 = manager.clone();
thread::spawn(move || task_runner::run(channel_mgr_3));
} else {
error!(target: Target::file_mail(), channel = id;
"<bright-blue>{:?}</> executable not exists!",
config.task.path
);
}
}
let mut dec_prefix = vec_strings!["-hide_banner", "-nostats", "-v", &ff_log_format];
if let Some(decoder_input_cmd) = &config.advanced.decoder.input_cmd {
dec_prefix.append(&mut decoder_input_cmd.clone());
}
let mut read_rate = 1.0;
if let Some(begin) = &node.begin {
let (delta, _) = get_delta(&config, begin);
let duration = node.out - node.seek;
let speed = duration / (duration + delta);
if node.seek == 0.0
&& speed > 0.0
&& speed < 1.3
&& delta < config.general.stop_threshold
{
read_rate = speed;
}
}
dec_prefix.append(&mut vec_strings!["-readrate", read_rate]);
dec_prefix.append(&mut cmd);
let dec_cmd = prepare_output_cmd(&config, dec_prefix, &node.filter);
debug!(target: Target::file_mail(), channel = id;
"HLS writer CMD: <bright-blue>\"ffmpeg {}\"</>",
dec_cmd.join(" ")
);
let mut dec_proc = match Command::new("ffmpeg")
.args(dec_cmd)
.stderr(Stdio::piped())
.spawn()
{
Ok(proc) => proc,
Err(e) => {
error!(target: Target::file_mail(), channel = id; "couldn't spawn ffmpeg process: {e}");
panic!("couldn't spawn ffmpeg process: {e}")
}
};
let dec_err = BufReader::new(dec_proc.stderr.take().unwrap());
*manager.decoder.lock().unwrap() = Some(dec_proc);
if let Err(e) = stderr_reader(dec_err, ignore, Decoder, manager.clone()) {
error!(target: Target::file_mail(), channel = id; "{e:?}")
};
if let Err(e) = manager.wait(Decoder) {
error!(target: Target::file_mail(), channel = id; "{e}");
}
while ingest_is_running.load(Ordering::SeqCst) {
sleep(Duration::from_secs(1));
}
if let Ok(elapsed) = timer.elapsed() {
if elapsed.as_millis() < 300 {
error_count += 1;
if error_count > 10 {
error!(target: Target::file_mail(), channel = id; "Reach fatal error count, terminate channel!");
break;
}
} else {
error_count = 0;
}
}
}
sleep(Duration::from_secs(1));
manager.stop_all();
Ok(())
}


@ -1,278 +0,0 @@
use std::{
io::{prelude::*, BufReader, BufWriter, Read},
process::{Command, Stdio},
sync::{atomic::Ordering, mpsc::sync_channel},
thread::{self, sleep},
time::{Duration, SystemTime},
};
use log::*;
mod desktop;
mod hls;
mod null;
mod stream;
pub use hls::write_hls;
use crate::player::{
controller::{ChannelManager, ProcessUnit::*},
input::{ingest_server, source_generator},
utils::{sec_to_time, stderr_reader},
};
use crate::utils::{config::OutputMode::*, errors::ProcessError, logging::Target, task_runner};
use crate::vec_strings;
/// Player
///
/// Here we create the input file loop from the playlist or folder source.
/// Then we read the stdout of the reader ffmpeg instance
/// and write it to the stdin of the streamer ffmpeg instance.
/// If configured, we also fire up an ffmpeg ingest server instance
/// for receiving live feeds.
/// When a live ingest arrives, it stops the current playback and switches to the live source.
/// When the ingest stops, it switches back to playlist/folder mode.
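/// The core of the copy loop below, reduced to its essence (simplified
/// sketch, error handling omitted):
/// ```ignore
/// let n = dec_reader.read(&mut buffer)?;   // bytes from the decoder's stdout
/// if n > 0 {
///     enc_writer.write(&buffer[..n])?;     // into the encoder's stdin
/// }
/// ```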
pub fn player(manager: ChannelManager) -> Result<(), ProcessError> {
let config = manager.config.lock()?.clone();
let id = config.general.channel_id;
let config_clone = config.clone();
let ff_log_format = format!("level+{}", config.logging.ffmpeg_level.to_lowercase());
let ignore_enc = config.logging.ignore_lines.clone();
let mut buffer = [0; 65088];
let mut live_on = false;
let playlist_init = manager.list_init.clone();
let is_terminated = manager.is_terminated.clone();
let ingest_is_running = manager.ingest_is_running.clone();
// get source iterator
let node_sources = source_generator(manager.clone());
// get ffmpeg output instance
let mut enc_proc = match config.output.mode {
Desktop => desktop::output(&config, &ff_log_format),
Null => null::output(&config, &ff_log_format),
Stream => stream::output(&config, &ff_log_format),
_ => panic!("Output mode doesn't exists!"),
};
let mut enc_writer = BufWriter::new(enc_proc.stdin.take().unwrap());
let enc_err = BufReader::new(enc_proc.stderr.take().unwrap());
*manager.encoder.lock().unwrap() = Some(enc_proc);
let enc_p_ctl = manager.clone();
// spawn a thread to log ffmpeg output error messages
let error_encoder_thread =
thread::spawn(move || stderr_reader(enc_err, ignore_enc, Encoder, enc_p_ctl));
let channel_mgr_2 = manager.clone();
let mut ingest_receiver = None;
// spawn a thread for ffmpeg ingest server and create a channel for package sending
if config.ingest.enable {
let (ingest_sender, rx) = sync_channel(96);
ingest_receiver = Some(rx);
thread::spawn(move || ingest_server(config_clone, ingest_sender, channel_mgr_2));
}
drop(config);
let mut error_count = 0;
'source_iter: for node in node_sources {
let config = manager.config.lock()?.clone();
*manager.current_media.lock().unwrap() = Some(node.clone());
let ignore_dec = config.logging.ignore_lines.clone();
let timer = SystemTime::now();
if is_terminated.load(Ordering::SeqCst) {
debug!(target: Target::file_mail(), channel = id; "Playout is terminated, break out from source loop");
break;
}
trace!("Decoder CMD: {:?}", node.cmd);
let mut cmd = match &node.cmd {
Some(cmd) => cmd.clone(),
None => break,
};
if !node.process.unwrap() {
// process true/false differs from node.cmd = None in that
// the source is valid but not meant to be played,
// so we skip it and jump to the next one.
continue;
}
let c_index = if cfg!(debug_assertions) {
format!(
" ({}/{})",
node.index.unwrap() + 1,
manager.current_list.lock().unwrap().len()
)
} else {
String::new()
};
info!(target: Target::file_mail(), channel = id;
"Play for <yellow>{}</>{c_index}: <b><magenta>{} {}</></b>",
sec_to_time(node.out - node.seek),
node.source,
node.audio
);
if config.task.enable {
if config.task.path.is_file() {
let channel_mgr_3 = manager.clone();
thread::spawn(move || task_runner::run(channel_mgr_3));
} else {
error!(target: Target::file_mail(), channel = id;
"<bright-blue>{:?}</> executable not exists!",
config.task.path
);
}
}
let mut dec_cmd = vec_strings!["-hide_banner", "-nostats", "-v", &ff_log_format];
if let Some(decoder_input_cmd) = &config.advanced.decoder.input_cmd {
dec_cmd.append(&mut decoder_input_cmd.clone());
}
dec_cmd.append(&mut cmd);
if let Some(mut filter) = node.filter {
dec_cmd.append(&mut filter.cmd());
dec_cmd.append(&mut filter.map());
}
if config.processing.vtt_enable && dec_cmd.iter().any(|s| s.ends_with(".vtt")) {
let i = dec_cmd
.iter()
.filter(|&n| n == "-i")
.count()
.saturating_sub(1);
dec_cmd.append(&mut vec_strings!("-map", format!("{i}:s"), "-c:s", "copy"));
}
if let Some(mut cmd) = config.processing.cmd.clone() {
dec_cmd.append(&mut cmd);
}
debug!(target: Target::file_mail(), channel = id;
"Decoder CMD: <bright-blue>\"ffmpeg {}\"</>",
dec_cmd.join(" ")
);
// create ffmpeg decoder instance, for reading the input files
let mut dec_proc = match Command::new("ffmpeg")
.args(dec_cmd)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
{
Ok(proc) => proc,
Err(e) => {
error!(target: Target::file_mail(), channel = id; "couldn't spawn decoder process: {e}");
panic!("couldn't spawn decoder process: {e}")
}
};
let mut dec_reader = BufReader::new(dec_proc.stdout.take().unwrap());
let dec_err = BufReader::new(dec_proc.stderr.take().unwrap());
*manager.clone().decoder.lock().unwrap() = Some(dec_proc);
let channel_mgr_c = manager.clone();
let error_decoder_thread =
thread::spawn(move || stderr_reader(dec_err, ignore_dec, Decoder, channel_mgr_c));
loop {
// when server is running, read from it
if ingest_is_running.load(Ordering::SeqCst) {
if !live_on {
info!(target: Target::file_mail(), channel = id; "Switch from {} to live ingest", config.processing.mode);
if let Err(e) = manager.stop(Decoder) {
error!(target: Target::file_mail(), channel = id; "{e}")
}
live_on = true;
playlist_init.store(true, Ordering::SeqCst);
}
for rx in ingest_receiver.as_ref().unwrap().try_iter() {
if let Err(e) = enc_writer.write(&rx.1[..rx.0]) {
error!(target: Target::file_mail(), channel = id; "Error from Ingest: {:?}", e);
break 'source_iter;
};
}
// read from decoder instance
} else {
if live_on {
info!(target: Target::file_mail(), channel = id; "Switch from live ingest to {}", config.processing.mode);
live_on = false;
break;
}
let dec_bytes_len = match dec_reader.read(&mut buffer[..]) {
Ok(length) => length,
Err(e) => {
error!(target: Target::file_mail(), channel = id; "Reading error from decoder: {e:?}");
break 'source_iter;
}
};
if dec_bytes_len > 0 {
if let Err(e) = enc_writer.write(&buffer[..dec_bytes_len]) {
error!(target: Target::file_mail(), channel = id; "Encoder write error: {}", e.kind());
break 'source_iter;
};
} else {
break;
}
}
}
if let Err(e) = manager.wait(Decoder) {
error!(target: Target::file_mail(), channel = id; "{e}")
}
if let Err(e) = error_decoder_thread.join() {
error!(target: Target::file_mail(), channel = id; "{e:?}");
};
if let Ok(elapsed) = timer.elapsed() {
if elapsed.as_millis() < 300 {
error_count += 1;
if error_count > 10 {
error!(target: Target::file_mail(), channel = id; "Reach fatal error count, terminate channel!");
break;
}
} else {
error_count = 0;
}
}
}
trace!("Out of source loop");
sleep(Duration::from_secs(1));
manager.stop_all();
if let Err(e) = error_encoder_thread.join() {
error!(target: Target::file_mail(), channel = id; "{e:?}");
};
Ok(())
}


@ -1,231 +0,0 @@
use std::sync::{
atomic::Ordering,
{Arc, Mutex},
};
use lexical_sort::natural_lexical_cmp;
use log::*;
use rand::{seq::SliceRandom, thread_rng};
use walkdir::WalkDir;
use crate::player::{
controller::ChannelManager,
utils::{include_file_extension, time_in_seconds, Media, PlayoutConfig},
};
use crate::utils::logging::Target;
/// Folder Sources
///
/// Like the playlist source, we create a folder list here to iterate over.
#[derive(Debug, Clone)]
pub struct FolderSource {
manager: ChannelManager,
current_node: Media,
}
impl FolderSource {
pub fn new(config: &PlayoutConfig, manager: ChannelManager) -> Self {
let id = config.general.channel_id;
let mut path_list = vec![];
let mut media_list = vec![];
let mut index: usize = 0;
debug!(target: Target::file_mail(), channel = id;
"generate: {:?}, paths: {:?}",
config.general.generate, config.storage.paths
);
if config.general.generate.is_some() && !config.storage.paths.is_empty() {
for path in &config.storage.paths {
path_list.push(path)
}
} else {
path_list.push(&config.channel.storage)
}
for path in &path_list {
if !path.is_dir() {
error!(target: Target::file_mail(), channel = id; "Path not exists: <b><magenta>{path:?}</></b>");
}
for entry in WalkDir::new(path)
.into_iter()
.flat_map(|e| e.ok())
.filter(|f| f.path().is_file())
.filter(|f| include_file_extension(config, f.path()))
{
let media = Media::new(0, &entry.path().to_string_lossy(), false);
media_list.push(media);
}
}
if media_list.is_empty() {
error!(target: Target::file_mail(), channel = id;
"no playable files found under: <b><magenta>{:?}</></b>",
path_list
);
}
if config.storage.shuffle {
info!(target: Target::file_mail(), channel = id; "Shuffle files");
let mut rng = thread_rng();
media_list.shuffle(&mut rng);
} else {
media_list.sort_by(|d1, d2| d1.source.cmp(&d2.source));
}
for item in media_list.iter_mut() {
item.index = Some(index);
index += 1;
}
*manager.current_list.lock().unwrap() = media_list;
Self {
manager,
current_node: Media::new(0, "", false),
}
}
pub fn from_list(manager: &ChannelManager, list: Vec<Media>) -> Self {
*manager.current_list.lock().unwrap() = list;
Self {
manager: manager.clone(),
current_node: Media::new(0, "", false),
}
}
fn shuffle(&mut self) {
let mut rng = thread_rng();
let mut nodes = self.manager.current_list.lock().unwrap();
nodes.shuffle(&mut rng);
for (index, item) in nodes.iter_mut().enumerate() {
item.index = Some(index);
}
}
fn sort(&mut self) {
let mut nodes = self.manager.current_list.lock().unwrap();
nodes.sort_by(|d1, d2| d1.source.cmp(&d2.source));
for (index, item) in nodes.iter_mut().enumerate() {
item.index = Some(index);
}
}
}
/// Create iterator for folder source
impl Iterator for FolderSource {
type Item = Media;
fn next(&mut self) -> Option<Self::Item> {
let config = self.manager.config.lock().unwrap().clone();
let id = config.general.channel_id;
if self.manager.current_index.load(Ordering::SeqCst)
< self.manager.current_list.lock().unwrap().len()
{
let i = self.manager.current_index.load(Ordering::SeqCst);
self.current_node = self.manager.current_list.lock().unwrap()[i].clone();
let _ = self.current_node.add_probe(false).ok();
self.current_node
.add_filter(&config, &self.manager.filter_chain);
self.current_node.begin = Some(time_in_seconds());
self.manager.current_index.fetch_add(1, Ordering::SeqCst);
Some(self.current_node.clone())
} else {
if config.storage.shuffle {
if config.general.generate.is_none() {
info!(target: Target::file_mail(), channel = id; "Shuffle files");
}
self.shuffle();
} else {
if config.general.generate.is_none() {
info!(target: Target::file_mail(), channel = id; "Sort files");
}
self.sort();
}
self.current_node = match self.manager.current_list.lock().unwrap().first() {
Some(m) => m.clone(),
None => return None,
};
let _ = self.current_node.add_probe(false).ok();
self.current_node
.add_filter(&config, &self.manager.filter_chain);
self.current_node.begin = Some(time_in_seconds());
self.manager.current_index.store(1, Ordering::SeqCst);
Some(self.current_node.clone())
}
}
}
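/// Build the filler list from `storage.filler_path`: a directory is walked
/// recursively (shuffled or naturally sorted, depending on config), while a
/// single file becomes a one-entry list.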
pub fn fill_filler_list(
config: &PlayoutConfig,
fillers: Option<Arc<Mutex<Vec<Media>>>>,
) -> Vec<Media> {
let id = config.general.channel_id;
let mut filler_list = vec![];
let filler_path = &config.storage.filler_path;
if filler_path.is_dir() {
for (index, entry) in WalkDir::new(&config.storage.filler_path)
.into_iter()
.flat_map(|e| e.ok())
.filter(|f| f.path().is_file())
.filter(|f| include_file_extension(config, f.path()))
.enumerate()
{
let mut media = Media::new(index, &entry.path().to_string_lossy(), false);
if fillers.is_none() {
if let Err(e) = media.add_probe(false) {
error!(target: Target::file_mail(), channel = id; "{e:?}");
};
}
filler_list.push(media);
}
if config.storage.shuffle {
let mut rng = thread_rng();
filler_list.shuffle(&mut rng);
} else {
filler_list.sort_by(|d1, d2| natural_lexical_cmp(&d1.source, &d2.source));
}
for (index, item) in filler_list.iter_mut().enumerate() {
item.index = Some(index);
}
if let Some(f) = fillers.as_ref() {
f.lock().unwrap().clone_from(&filler_list);
}
} else if filler_path.is_file() {
let mut media = Media::new(0, &config.storage.filler_path.to_string_lossy(), false);
if fillers.is_none() {
if let Err(e) = media.add_probe(false) {
error!(target: Target::file_mail(), channel = id; "{e:?}");
};
}
filler_list.push(media);
if let Some(f) = fillers.as_ref() {
f.lock().unwrap().clone_from(&filler_list);
}
}
filler_list
}


@ -1,262 +0,0 @@
use std::{
io::{BufRead, BufReader},
process::{Command, Stdio},
sync::{
atomic::{AtomicBool, Ordering},
Arc, Mutex,
},
time::Instant,
};
use log::*;
use regex::Regex;
use crate::player::filter::FilterType::Audio;
use crate::player::utils::{
is_close, is_remote, loop_image, sec_to_time, seek_and_length, JsonPlaylist, Media,
};
use crate::utils::{
config::{OutputMode::Null, PlayoutConfig, FFMPEG_IGNORE_ERRORS, IMAGE_FORMAT},
errors::ProcessError,
logging::Target,
};
use crate::vec_strings;
/// Validate a single media file.
///
/// - Check if file exists
/// - Check if ffmpeg can read the file
/// - Check if Metadata exists
/// - Check if the file is not silent
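/// The silence check runs ffmpeg's `silencedetect` filter over a short probe
/// window, conceptually like this (values hypothetical):
/// ```text
/// ffmpeg -ss <duration/4> -i clip.mp4 -af silencedetect=n=-30dB -t 15 -f null -
/// ```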
fn check_media(
mut node: Media,
pos: usize,
begin: f64,
config: &PlayoutConfig,
) -> Result<(), ProcessError> {
let id = config.general.channel_id;
let mut dec_cmd = vec_strings!["-hide_banner", "-nostats", "-v", "level+info"];
let mut error_list = vec![];
let mut config = config.clone();
config.output.mode = Null;
let mut process_length = 0.1;
if let Some(decoder_input_cmd) = &config.advanced.decoder.input_cmd {
dec_cmd.append(&mut decoder_input_cmd.clone());
}
if config.logging.detect_silence {
process_length = 15.0;
let seek = node.duration / 4.0;
// Seek in file, to prevent false silence detection on intros without sound.
dec_cmd.append(&mut vec_strings!["-ss", seek]);
}
// Take care that no seek or length command is added.
node.seek = 0.0;
node.out = node.duration;
if node
.source
.rsplit_once('.')
.map(|(_, e)| e.to_lowercase())
.filter(|c| IMAGE_FORMAT.contains(&c.as_str()))
.is_some()
{
node.cmd = Some(loop_image(&config, &node));
} else {
node.cmd = Some(seek_and_length(&config, &mut node));
}
node.add_filter(&config, &None);
let mut filter = node.filter.unwrap_or_default();
if filter.cmd().len() > 1 {
let re_clean = Regex::new(r"volume=[0-9.]+")?;
filter.audio_chain = re_clean
.replace_all(&filter.audio_chain, "anull")
.to_string();
}
filter.add_filter("silencedetect=n=-30dB", 0, Audio);
dec_cmd.append(&mut node.cmd.unwrap_or_default());
dec_cmd.append(&mut filter.cmd());
dec_cmd.append(&mut filter.map());
dec_cmd.append(&mut vec_strings!["-t", process_length, "-f", "null", "-"]);
let mut enc_proc = Command::new("ffmpeg")
.args(dec_cmd)
.stderr(Stdio::piped())
.spawn()?;
let enc_err = BufReader::new(enc_proc.stderr.take().unwrap());
let mut silence_start = 0.0;
let mut silence_end = 0.0;
let re_start = Regex::new(r"silence_start: ([0-9]+:)?([0-9.]+)")?;
let re_end = Regex::new(r"silence_end: ([0-9]+:)?([0-9.]+)")?;
for line in enc_err.lines() {
let line = line?;
if !FFMPEG_IGNORE_ERRORS.iter().any(|i| line.contains(*i))
&& !config.logging.ignore_lines.iter().any(|i| line.contains(i))
&& (line.contains("[error]") || line.contains("[fatal]"))
{
let log_line = line.replace("[error] ", "").replace("[fatal] ", "");
if !error_list.contains(&log_line) {
error_list.push(log_line);
}
}
if config.logging.detect_silence {
if let Some(start) = re_start.captures(&line).and_then(|c| c.get(2)) {
silence_start = start.as_str().parse::<f32>().unwrap_or_default();
}
if let Some(end) = re_end.captures(&line).and_then(|c| c.get(2)) {
silence_end = end.as_str().parse::<f32>().unwrap_or_default() + 0.5;
}
}
}
if silence_end - silence_start > process_length {
error_list.push("Audio is totally silent!".to_string());
}
if !error_list.is_empty() {
error!(target: Target::file_mail(), channel = id;
"<bright black>[Validator]</> ffmpeg error on position <yellow>{pos}</> - {}: <b><magenta>{}</></b>: {}",
sec_to_time(begin),
node.source,
error_list.join("\n")
)
}
error_list.clear();
if let Err(e) = enc_proc.wait() {
error!(target: Target::file_mail(), channel = id; "Validation process: {e:?}");
}
Ok(())
}
/// Validate a given playlist, to check if:
///
/// - the source files exist
/// - the files can be read by ffprobe and metadata exists
/// - the total playtime fits the target length from the config
///
/// We run this function in a thread so it does not block the main function.
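/// Usage sketch (hypothetical caller):
/// ```ignore
/// let list = current_list.clone();
/// let stop = is_terminated.clone();
/// thread::spawn(move || validate_playlist(config, list, playlist, stop));
/// ```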
pub fn validate_playlist(
mut config: PlayoutConfig,
current_list: Arc<Mutex<Vec<Media>>>,
mut playlist: JsonPlaylist,
is_terminated: Arc<AtomicBool>,
) {
let id = config.general.channel_id;
let date = playlist.date;
if config.text.add_text && !config.text.text_from_filename {
// Turn off the drawtext filter with zmq, because its port is needed by the decoder instance.
config.text.add_text = false;
}
let mut length = config.playlist.length_sec.unwrap();
let mut begin = config.playlist.start_sec.unwrap();
length += begin;
debug!(target: Target::file_mail(), channel = id; "Validate playlist from: <yellow>{date}</>");
let timer = Instant::now();
for (index, item) in playlist.program.iter_mut().enumerate() {
if is_terminated.load(Ordering::SeqCst) {
return;
}
let pos = index + 1;
if !is_remote(&item.source) {
if item.audio.is_empty() {
if let Err(e) = item.add_probe(false) {
error!(target: Target::file_mail(), channel = id;
"[Validation] Error on position <yellow>{pos:0>3}</> <yellow>{}</>: {e}",
sec_to_time(begin)
);
}
} else if let Err(e) = item.add_probe(true) {
error!(target: Target::file_mail(), channel = id;
"[Validation] Error on position <yellow>{pos:0>3}</> <yellow>{}</>: {e}",
sec_to_time(begin)
);
}
}
if item.probe.is_some() {
if let Err(e) = check_media(item.clone(), pos, begin, &config) {
error!(target: Target::file_mail(), channel = id; "{e}");
} else if config.general.validate {
debug!(target: Target::file_mail(), channel = id;
"[Validation] Source at <yellow>{}</>, seems fine: <b><magenta>{}</></b>",
sec_to_time(begin),
item.source
)
} else if let Ok(mut list) = current_list.try_lock() {
// Find the same item in the current playlist, then add the probe to it.
// Also check if the duration differs from the playlist value; log an error if so and adjust the value.
list.iter_mut().filter(|list_item| list_item.source == item.source).for_each(|o| {
o.probe.clone_from(&item.probe);
if let Some(dur) =
item.probe.as_ref().and_then(|f| f.format.duration.clone())
{
let probe_duration = dur.parse().unwrap_or_default();
if !is_close(o.duration, probe_duration, 1.2) {
error!(target: Target::file_mail(), channel = id;
"[Validation] File duration (at: <yellow>{}</>) differs from playlist value. File duration: <yellow>{}</>, playlist value: <yellow>{}</>, source <b><magenta>{}</></b>",
sec_to_time(o.begin.unwrap_or_default()), sec_to_time(probe_duration), sec_to_time(o.duration), o.source
);
o.duration = probe_duration;
}
}
if o.audio == item.audio && item.probe_audio.is_some() {
o.probe_audio.clone_from(&item.probe_audio);
o.duration_audio = item.duration_audio;
}
});
}
}
begin += item.out - item.seek;
}
if !config.playlist.infinit && length > begin + 1.2 {
error!(target: Target::file_mail(), channel = id;
"[Validation] Playlist from <yellow>{date}</> not long enough, <yellow>{}</> needed!",
sec_to_time(length - begin),
);
}
if config.general.validate {
info!(target: Target::file_mail(), channel = id;
"[Validation] Playlist length: <yellow>{}</>",
sec_to_time(begin - config.playlist.start_sec.unwrap())
);
}
debug!(target: Target::file_mail(), channel = id;
"Validation done, in <yellow>{:.3?}</>, playlist length: <yellow>{}</> ...",
timer.elapsed(),
sec_to_time(begin - config.playlist.start_sec.unwrap())
);
}
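A hedged sketch of how a caller might run this in a background thread; the surrounding names (`config`, `media_list`, `playlist`) are assumed, only `validate_playlist` comes from above:

```rust
use std::sync::{atomic::AtomicBool, Arc, Mutex};
use std::thread;

// Sketch only: PlayoutConfig, Media and JsonPlaylist are the crate types used above.
fn spawn_validation(
    config: PlayoutConfig,
    media_list: Arc<Mutex<Vec<Media>>>,
    playlist: JsonPlaylist,
) -> Arc<AtomicBool> {
    let is_terminated = Arc::new(AtomicBool::new(false));
    let terminated = is_terminated.clone();

    // The worker owns clones of the shared state; dropping the JoinHandle
    // detaches the thread, matching the fire-and-forget usage described above.
    thread::spawn(move || validate_playlist(config, media_list, playlist, terminated));

    is_terminated
}
```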

View File

@ -1,155 +0,0 @@
use std::{
sync::{atomic::Ordering, Arc},
time::Duration,
};
use actix_web::{rt::time::interval, web};
use actix_web_lab::{
sse::{self, Sse},
util::InfallibleStream,
};
use parking_lot::Mutex;
use tokio::sync::mpsc;
use tokio_stream::wrappers::ReceiverStream;
use crate::player::{controller::ChannelManager, utils::get_data_map};
use crate::utils::system;
#[derive(Debug, Clone)]
struct Client {
manager: ChannelManager,
endpoint: String,
sender: mpsc::Sender<sse::Event>,
}
impl Client {
fn new(manager: ChannelManager, endpoint: String, sender: mpsc::Sender<sse::Event>) -> Self {
Self {
manager,
endpoint,
sender,
}
}
}
pub struct Broadcaster {
inner: Mutex<BroadcasterInner>,
}
#[derive(Debug, Clone, Default)]
struct BroadcasterInner {
clients: Vec<Client>,
}
impl Broadcaster {
/// Constructs new broadcaster and spawns ping loop.
pub fn create() -> Arc<Self> {
let this = Arc::new(Broadcaster {
inner: Mutex::new(BroadcasterInner::default()),
});
Broadcaster::spawn_ping(Arc::clone(&this));
this
}
/// Pings clients every 10 seconds to see if they are alive, and removes them from the
/// broadcast list if not.
fn spawn_ping(this: Arc<Self>) {
actix_web::rt::spawn(async move {
let mut interval = interval(Duration::from_secs(1));
let mut counter = 0;
loop {
interval.tick().await;
if counter % 10 == 0 {
this.remove_stale_clients().await;
}
this.broadcast_playout().await;
this.broadcast_system().await;
counter = (counter + 1) % 61;
}
});
}
/// Removes all non-responsive clients from broadcast list.
async fn remove_stale_clients(&self) {
let clients = self.inner.lock().clients.clone();
let mut ok_clients = Vec::new();
for client in clients {
if client
.sender
.send(sse::Event::Comment("ping".into()))
.await
.is_ok()
{
ok_clients.push(client.clone());
}
}
self.inner.lock().clients = ok_clients;
}
/// Registers client with broadcaster, returning an SSE response body.
pub async fn new_client(
&self,
manager: ChannelManager,
endpoint: String,
) -> Sse<InfallibleStream<ReceiverStream<sse::Event>>> {
let (tx, rx) = mpsc::channel(10);
tx.send(sse::Data::new("connected").into()).await.unwrap();
self.inner
.lock()
.clients
.push(Client::new(manager, endpoint, tx));
Sse::from_infallible_receiver(rx)
}
/// Broadcasts playout status to clients.
pub async fn broadcast_playout(&self) {
let clients = self.inner.lock().clients.clone();
for client in clients.iter().filter(|client| client.endpoint == "playout") {
let media_map = get_data_map(&client.manager);
if client.manager.is_alive.load(Ordering::SeqCst) {
let _ = client
.sender
.send(
sse::Data::new(serde_json::to_string(&media_map).unwrap_or_default())
.into(),
)
.await;
} else {
let _ = client
.sender
.send(sse::Data::new("not running").into())
.await;
}
}
}
/// Broadcasts system status to clients.
pub async fn broadcast_system(&self) {
let clients = self.inner.lock().clients.clone();
for client in clients {
if &client.endpoint == "system" {
let config = client.manager.config.lock().unwrap().clone();
if let Ok(stat) = web::block(move || system::stat(config.clone())).await {
let stat_string = stat.to_string();
let _ = client.sender.send(sse::Data::new(stat_string).into()).await;
};
}
}
}
}
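For orientation, a hedged wiring sketch (the handler shape is assumed; the real routes follow below). It relies on `Sse` implementing actix-web's `Responder`, which actix-web-lab provides:

```rust
use std::sync::Arc;

// Assumed context: one Arc<Broadcaster> created at startup via Broadcaster::create(),
// plus the ChannelManager that belongs to the requested channel.
async fn sse_playout(
    broadcaster: Arc<Broadcaster>,
    manager: ChannelManager,
) -> impl actix_web::Responder {
    // Registers the client; the ping loop spawned in create() drops it
    // again once the connection goes away.
    broadcaster.new_client(manager, "playout".to_string()).await
}
```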

View File

@ -1,55 +0,0 @@
use std::{
collections::HashSet,
time::{Duration, SystemTime},
};
use tokio::sync::Mutex;
use uuid::Uuid;
use crate::utils::errors::ServiceError;
pub mod broadcast;
pub mod routes;
#[derive(Debug, Eq, Hash, PartialEq, Clone, Copy)]
pub struct UuidData {
pub uuid: Uuid,
pub expiration: SystemTime,
}
impl UuidData {
pub fn new() -> Self {
Self {
uuid: Uuid::new_v4(),
expiration: SystemTime::now() + Duration::from_secs(2 * 3600), // 2 hours
}
}
}
impl Default for UuidData {
fn default() -> Self {
Self::new()
}
}
pub struct SseAuthState {
pub uuids: Mutex<HashSet<UuidData>>,
}
/// Remove all UUIDs from the HashSet that have passed their expiration time.
pub fn prune_uuids(uuids: &mut HashSet<UuidData>) {
uuids.retain(|entry| entry.expiration > SystemTime::now());
}
pub fn check_uuid(uuids: &mut HashSet<UuidData>, uuid: &str) -> Result<&'static str, ServiceError> {
let client_uuid = Uuid::parse_str(uuid)?;
prune_uuids(uuids);
match uuids.iter().find(|entry| entry.uuid == client_uuid) {
Some(_) => Ok("UUID is valid"),
None => Err(ServiceError::Unauthorized(
"Invalid or expired UUID".to_string(),
)),
}
}
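A small usage sketch of the two helpers (purely illustrative):

```rust
use std::collections::HashSet;

fn main() {
    let mut uuids: HashSet<UuidData> = HashSet::new();
    let entry = UuidData::new(); // expires in 2 hours
    let token = entry.uuid.to_string();
    uuids.insert(entry);

    // A known, unexpired UUID passes; an unknown one is rejected.
    assert!(check_uuid(&mut uuids, &token).is_ok());
    assert!(check_uuid(&mut uuids, "00000000-0000-0000-0000-000000000000").is_err());
}
```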

View File

@ -1,88 +0,0 @@
use std::sync::Mutex;
use actix_web::{get, post, web, Responder};
use actix_web_grants::proc_macro::protect;
use serde::{Deserialize, Serialize};
use super::{check_uuid, prune_uuids, SseAuthState, UuidData};
use crate::db::models::Role;
use crate::player::controller::ChannelController;
use crate::sse::broadcast::Broadcaster;
use crate::utils::errors::ServiceError;
#[derive(Deserialize, Serialize)]
struct User {
#[serde(default, skip_serializing)]
endpoint: String,
uuid: String,
}
impl User {
fn new(endpoint: String, uuid: String) -> Self {
Self { endpoint, uuid }
}
}
/// **Get generated UUID**
///
/// ```BASH
/// curl -X POST 'http://127.0.0.1:8787/api/generate-uuid' -H 'Authorization: Bearer <TOKEN>'
/// ```
#[post("/generate-uuid")]
#[protect(
any("Role::GlobalAdmin", "Role::ChannelAdmin", "Role::User"),
ty = "Role"
)]
async fn generate_uuid(data: web::Data<SseAuthState>) -> Result<impl Responder, ServiceError> {
let mut uuids = data.uuids.lock().await;
let new_uuid = UuidData::new();
let user_auth = User::new(String::new(), new_uuid.uuid.to_string());
prune_uuids(&mut uuids);
uuids.insert(new_uuid);
Ok(web::Json(user_auth))
}
/// **Validate UUID**
///
/// ```BASH
/// curl -X GET 'http://127.0.0.1:8787/data/validate?uuid=f2f8c29b-712a-48c5-8919-b535d3a05a3a'
/// ```
#[get("/validate")]
async fn validate_uuid(
data: web::Data<SseAuthState>,
user: web::Query<User>,
) -> Result<impl Responder, ServiceError> {
let mut uuids = data.uuids.lock().await;
match check_uuid(&mut uuids, user.uuid.as_str()) {
Ok(s) => Ok(web::Json(s)),
Err(e) => Err(e),
}
}
/// **Connect to event handler**
///
/// ```BASH
/// curl -X GET 'http://127.0.0.1:8787/data/event/1?endpoint=system&uuid=f2f8c29b-712a-48c5-8919-b535d3a05a3a'
/// ```
#[get("/event/{id}")]
async fn event_stream(
broadcaster: web::Data<Broadcaster>,
data: web::Data<SseAuthState>,
id: web::Path<i32>,
user: web::Query<User>,
controllers: web::Data<Mutex<ChannelController>>,
) -> Result<impl Responder, ServiceError> {
let mut uuids = data.uuids.lock().await;
check_uuid(&mut uuids, user.uuid.as_str())?;
let manager = controllers.lock().unwrap().get(*id).unwrap();
Ok(broadcaster
.new_client(manager.clone(), user.endpoint.clone())
.await)
}

View File

@ -1,306 +0,0 @@
use std::path::Path;
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, NoneAsEmptyString};
use shlex::split;
use sqlx::{Pool, Sqlite};
use tokio::io::AsyncReadExt;
use ts_rs::TS;
use crate::db::{handles, models::AdvancedConfiguration};
use crate::utils::ServiceError;
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct AdvancedConfig {
pub decoder: DecoderConfig,
pub encoder: EncoderConfig,
pub filter: FilterConfig,
pub ingest: IngestConfig,
}
#[serde_as]
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct DecoderConfig {
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub input_param: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub output_param: Option<String>,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub input_cmd: Option<Vec<String>>,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub output_cmd: Option<Vec<String>>,
}
#[serde_as]
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct EncoderConfig {
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub input_param: Option<String>,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub input_cmd: Option<Vec<String>>,
}
#[serde_as]
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct IngestConfig {
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub input_param: Option<String>,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub input_cmd: Option<Vec<String>>,
}
#[serde_as]
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct FilterConfig {
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub deinterlace: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub pad_scale_w: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub pad_scale_h: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub pad_video: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub fps: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub scale: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub set_dar: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub fade_in: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub fade_out: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub overlay_logo_scale: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub overlay_logo_fade_in: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub overlay_logo_fade_out: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub overlay_logo: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub tpad: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub drawtext_from_file: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub drawtext_from_zmq: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub aevalsrc: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub afade_in: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub afade_out: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub apad: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub volume: Option<String>,
#[ts(type = "string")]
#[serde_as(as = "NoneAsEmptyString")]
pub split: Option<String>,
}
impl AdvancedConfig {
pub fn new(config: AdvancedConfiguration) -> Self {
Self {
decoder: DecoderConfig {
input_param: config.decoder_input_param.clone(),
output_param: config.decoder_output_param.clone(),
input_cmd: match config.decoder_input_param {
Some(input_param) => split(&input_param),
None => None,
},
output_cmd: match config.decoder_output_param {
Some(output_param) => split(&output_param),
None => None,
},
},
encoder: EncoderConfig {
input_param: config.encoder_input_param.clone(),
input_cmd: match config.encoder_input_param {
Some(input_param) => split(&input_param),
None => None,
},
},
filter: FilterConfig {
deinterlace: config.filter_deinterlace,
pad_scale_w: config.filter_pad_scale_w,
pad_scale_h: config.filter_pad_scale_h,
pad_video: config.filter_pad_video,
fps: config.filter_fps,
scale: config.filter_scale,
set_dar: config.filter_set_dar,
fade_in: config.filter_fade_in,
fade_out: config.filter_fade_out,
overlay_logo_scale: config.filter_overlay_logo_scale,
overlay_logo_fade_in: config.filter_overlay_logo_fade_in,
overlay_logo_fade_out: config.filter_overlay_logo_fade_out,
overlay_logo: config.filter_overlay_logo,
tpad: config.filter_tpad,
drawtext_from_file: config.filter_drawtext_from_file,
drawtext_from_zmq: config.filter_drawtext_from_zmq,
aevalsrc: config.filter_aevalsrc,
afade_in: config.filter_afade_in,
afade_out: config.filter_afade_out,
apad: config.filter_apad,
volume: config.filter_volume,
split: config.filter_split,
},
ingest: IngestConfig {
input_param: config.ingest_input_param.clone(),
input_cmd: match config.ingest_input_param {
Some(input_param) => split(&input_param),
None => None,
},
},
}
}
pub async fn dump(pool: &Pool<Sqlite>, id: i32) -> Result<(), ServiceError> {
let config = Self::new(handles::select_advanced_configuration(pool, id).await?);
let f_keys = [
"deinterlace",
"pad_scale_w",
"pad_scale_h",
"pad_video",
"fps",
"scale",
"set_dar",
"fade_in",
"fade_out",
"overlay_logo_scale",
"overlay_logo_fade_in",
"overlay_logo_fade_out",
"overlay_logo",
"tpad",
"drawtext_from_file",
"drawtext_from_zmq",
"aevalsrc",
"afade_in",
"afade_out",
"apad",
"volume",
"split",
];
let toml_string = toml_edit::ser::to_string_pretty(&config)?;
let mut doc = toml_string.parse::<toml_edit::DocumentMut>()?;
if let Some(decoder) = doc.get_mut("decoder").and_then(|o| o.as_table_mut()) {
decoder
.decor_mut()
.set_prefix("# Changing these settings is for advanced users only!\n# There will be no support or guarantee that it will be stable after changing them.\n\n");
}
if let Some(output_param) = doc
.get_mut("decoder")
.and_then(|d| d.get_mut("output_param"))
.and_then(|o| o.as_value_mut())
{
output_param
.decor_mut()
.set_suffix(" # get also applied to ingest instance.");
}
if let Some(filter) = doc.get_mut("filter") {
for key in &f_keys {
if let Some(item) = filter.get_mut(*key).and_then(|o| o.as_value_mut()) {
match *key {
"deinterlace" => item.decor_mut().set_suffix(" # yadif=0:-1:0"),
"pad_scale_w" => item.decor_mut().set_suffix(" # scale={}:-1"),
"pad_scale_h" => item.decor_mut().set_suffix(" # scale=-1:{}"),
"pad_video" => item.decor_mut().set_suffix(
" # pad=max(iw\\,ih*({0}/{1})):ow/({0}/{1}):(ow-iw)/2:(oh-ih)/2",
),
"fps" => item.decor_mut().set_suffix(" # fps={}"),
"scale" => item.decor_mut().set_suffix(" # scale={}:{}"),
"set_dar" => item.decor_mut().set_suffix(" # setdar=dar={}"),
"fade_in" => item.decor_mut().set_suffix(" # fade=in:st=0:d=0.5"),
"fade_out" => item.decor_mut().set_suffix(" # fade=out:st={}:d=1.0"),
"overlay_logo_scale" => item.decor_mut().set_suffix(" # scale={}"),
"overlay_logo_fade_in" => {
item.decor_mut().set_suffix(" # fade=in:st=0:d=1.0:alpha=1")
}
"overlay_logo_fade_out" => item
.decor_mut()
.set_suffix(" # fade=out:st={}:d=1.0:alpha=1"),
"overlay_logo" => item
.decor_mut()
.set_suffix(" # null[l];[v][l]overlay={}:shortest=1"),
"tpad" => item
.decor_mut()
.set_suffix(" # tpad=stop_mode=add:stop_duration={}"),
"drawtext_from_file" => {
item.decor_mut().set_suffix(" # drawtext=text='{}':{}{}")
}
"drawtext_from_zmq" => item
.decor_mut()
.set_suffix(" # zmq=b=tcp\\\\://'{}',drawtext@dyntext={}"),
"aevalsrc" => item.decor_mut().set_suffix(
" # aevalsrc=0:channel_layout=stereo:duration={}:sample_rate=48000",
),
"afade_in" => item.decor_mut().set_suffix(" # afade=in:st=0:d=0.5"),
"afade_out" => item.decor_mut().set_suffix(" # afade=out:st={}:d=1.0"),
"apad" => item.decor_mut().set_suffix(" # apad=whole_dur={}"),
"volume" => item.decor_mut().set_suffix(" # volume={}"),
"split" => item.decor_mut().set_suffix(" # split={}{}"),
_ => (),
}
}
}
};
tokio::fs::write(&format!("advanced_{id}.toml"), doc.to_string()).await?;
Ok(())
}
pub async fn import(pool: &Pool<Sqlite>, id: i32, path: &Path) -> Result<(), ServiceError> {
if path.is_file() {
let mut file = tokio::fs::File::open(path).await?;
let mut contents = String::new();
file.read_to_string(&mut contents).await?;
let config: Self = toml_edit::de::from_str(&contents).unwrap();
handles::update_advanced_configuration(pool, id, config).await?;
} else {
return Err(ServiceError::BadRequest("Path does not exist!".to_string()));
}
Ok(())
}
}
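The `*_param` strings above become argument vectors through `shlex::split`, which honors shell-style quoting. A self-contained sketch (the parameter values are illustrative):

```rust
use shlex::split;

fn main() {
    // Plain flags split on whitespace ...
    assert_eq!(
        split("-hwaccel cuda -i live.mp4"),
        Some(vec![
            "-hwaccel".to_string(),
            "cuda".to_string(),
            "-i".to_string(),
            "live.mp4".to_string(),
        ])
    );

    // ... while quoted values survive as a single argument.
    assert_eq!(
        split("-vf \"scale=1280:-1\""),
        Some(vec!["-vf".to_string(), "scale=1280:-1".to_string()])
    );
}
```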

View File

@ -1,595 +0,0 @@
use std::{
io::{stdin, stdout, Write},
path::{Path, PathBuf},
};
#[cfg(target_family = "unix")]
use std::os::unix::fs::MetadataExt;
use clap::Parser;
use rpassword::read_password;
use sqlx::{Pool, Sqlite};
#[cfg(target_family = "unix")]
use tokio::fs;
use crate::db::{
handles,
models::{Channel, User},
};
use crate::utils::{
advanced_config::AdvancedConfig,
config::{OutputMode, PlayoutConfig},
copy_assets,
};
use crate::ARGS;
#[cfg(target_family = "unix")]
use crate::utils::db_path;
#[derive(Parser, Debug, Clone)]
#[clap(version,
about = "ffplayout - 24/7 broadcasting solution",
long_about = Some("ffplayout - 24/7 broadcasting solution\n
Stream dynamic playlists or folder contents with the power of ffmpeg.
The target can be an HLS playlist, rtmp/srt/udp server, desktop player
or any other output supported by ffmpeg.\n
ffplayout also provides a web frontend and API to control streaming,
manage config, files, text overlay, etc."),
next_line_help = false,
)]
pub struct Args {
#[clap(
short,
long,
help_heading = Some("Initial Setup"),
help = "Initialize defaults: global admin, paths, settings, etc."
)]
pub init: bool,
#[clap(short, long, help_heading = Some("Initial Setup"), help = "Create admin user")]
pub username: Option<String>,
#[clap(short, long, help_heading = Some("Initial Setup"), help = "Admin mail address")]
pub mail: Option<String>,
#[clap(short, long, help_heading = Some("Initial Setup"), help = "Admin password")]
pub password: Option<String>,
#[clap(long, env, help_heading = Some("Initial Setup"), help = "Storage root path")]
pub storage: Option<String>,
#[clap(long, env, help_heading = Some("Initial Setup"), help = "SMTP server for system mails")]
pub mail_smtp: Option<String>,
#[clap(long, env, help_heading = Some("Initial Setup"), help = "Mail user for system mails")]
pub mail_user: Option<String>,
#[clap(long, env, help_heading = Some("Initial Setup"), help = "Mail password for system mails")]
pub mail_password: Option<String>,
#[clap(long, env, help_heading = Some("Initial Setup"), help = "Use TLS for system mails")]
pub mail_starttls: bool,
#[clap(long, env, help_heading = Some("Initial Setup / General"), help = "Logging path")]
pub logs: Option<String>,
#[clap(long, env, help_heading = Some("Initial Setup / General"), help = "Path to public files, also HLS playlists")]
pub public: Option<String>,
#[clap(long, help_heading = Some("Initial Setup / Playlist"), help = "Path to playlist, or playlist root folder.")]
pub playlists: Option<String>,
#[clap(long, help_heading = Some("General"), help = "Add or update a global admin use")]
pub user_set: bool,
#[clap(long, env, help_heading = Some("General"), help = "Path to database file")]
pub db: Option<PathBuf>,
#[clap(
long,
help_heading = Some("General"),
help = "Drop database. WARNING: this will delete all configurations!"
)]
pub drop_db: bool,
#[clap(
long,
help_heading = Some("General"),
help = "Dump advanced channel configuration to advanced_{channel}.toml"
)]
pub dump_advanced: bool,
#[clap(long, help_heading = Some("General"), help = "Dump channel configuration to ffplayout_{channel}.toml")]
pub dump_config: bool,
#[clap(
long,
help_heading = Some("General"),
help = "import advanced channel configuration from file."
)]
pub import_advanced: Option<PathBuf>,
#[clap(long, help_heading = Some("General"), help = "import channel configuration from file.")]
pub import_config: Option<PathBuf>,
#[clap(long, help_heading = Some("General"), help = "List available channel ids")]
pub list_channels: bool,
#[clap(short, env, long, help_heading = Some("General"), help = "Listen on IP:PORT, like: 127.0.0.1:8787")]
pub listen: Option<String>,
#[clap(
long,
env,
help_heading = Some("General"),
help = "Override logging level: trace, debug, println, warn, eprintln"
)]
pub log_level: Option<String>,
#[clap(long, env, help_heading = Some("General"), help = "Log to console")]
pub log_to_console: bool,
#[clap(
short,
long,
env,
help_heading = Some("General / Playout"),
help = "Channels by ids to process (for export config, generate playlist, foreground running, etc.)",
num_args = 1..,
)]
pub channels: Option<Vec<i32>>,
#[clap(
short,
long,
help_heading = Some("Playlist"),
help = "Generate playlist for dates, like: 2022-01-01 - 2022-01-10",
name = "YYYY-MM-DD",
num_args = 1..,
)]
pub generate: Option<Vec<String>>,
#[clap(long, help_heading = Some("Playlist"), help = "Optional path list for playlist generations", num_args = 1..)]
pub paths: Option<Vec<PathBuf>>,
#[clap(
short,
long,
help_heading = Some("Playlist"),
help = "Start time in 'hh:mm:ss', 'now' for start with first"
)]
pub start: Option<String>,
#[clap(short = 'T', long, help_heading = Some("Playlist"), help = "JSON template file for generating playlist")]
pub template: Option<PathBuf>,
#[clap(long, help_heading = Some("Playlist"), help = "Only validate given playlist")]
pub validate: bool,
#[clap(long, env, help_heading = Some("Playout"), help = "Run playout without webserver and frontend")]
pub foreground: bool,
#[clap(short, long, help_heading = Some("Playout"), help = "Play folder content")]
pub folder: Option<PathBuf>,
#[clap(long, env, help_heading = Some("Playout"), help = "Keep log file for given days")]
pub log_backup_count: Option<usize>,
#[clap(long, env, help_heading = Some("Playout"), help = "Add timestamp to log line")]
pub log_timestamp: bool,
#[clap(short, long, help_heading = Some("Playout"), help = "Set output mode: desktop, hls, null, stream")]
pub output: Option<OutputMode>,
#[clap(short, long, help_heading = Some("Playout"), help = "Set audio volume")]
pub volume: Option<f64>,
#[clap(long, help_heading = Some("Playout"), help = "Skip validation process")]
pub skip_validation: bool,
}
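As a quick sanity check of the derive, a hedged sketch parsing a synthetic argument list (note that flags marked `env` could also be fed from environment variables):

```rust
use clap::Parser;

fn main() {
    // Parse a synthetic argv; everything not passed keeps its default.
    let args = Args::parse_from(["ffplayout", "--channels", "1", "2", "--validate"]);

    assert_eq!(args.channels, Some(vec![1, 2]));
    assert!(args.validate);
    assert!(args.output.is_none());
}
```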
fn global_user(args: &mut Args) {
let mut user = String::new();
let mut mail = String::new();
if args.username.is_none() {
print!("Global admin: ");
stdout().flush().unwrap();
stdin()
.read_line(&mut user)
.expect("Did not enter a correct name?");
args.username = Some(user.trim().to_string());
}
if args.password.is_none() {
print!("Password: ");
stdout().flush().unwrap();
let password = read_password();
args.password = password.ok();
}
if args.mail.is_none() {
print!("Mail: ");
stdout().flush().unwrap();
stdin()
.read_line(&mut mail)
.expect("Did not enter a correct name?");
args.mail = Some(mail.trim().to_string());
}
}
pub async fn run_args(pool: &Pool<Sqlite>) -> Result<(), i32> {
let mut args = ARGS.clone();
if !args.dump_advanced && !args.dump_config && !args.drop_db {
if let Err(e) = handles::db_migrate(pool).await {
panic!("{e}");
};
}
let channels = handles::select_related_channels(pool, None)
.await
.unwrap_or(vec![Channel::default()]);
let mut error_code = -1;
if args.init {
let check_user = handles::select_users(pool).await;
let mut storage = String::new();
let mut playlist = String::new();
let mut logging = String::new();
let mut public = String::new();
let mut mail_smtp = String::new();
let mut mail_user = String::new();
let mut mail_starttls = String::new();
let mut global = handles::select_global(pool).await.map_err(|_| 1)?;
if check_user.unwrap_or_default().is_empty() {
global_user(&mut args);
}
if let Some(st) = args.storage {
global.storage = st;
} else {
print!("Storage path [{}]: ", global.storage);
stdout().flush().unwrap();
stdin()
.read_line(&mut storage)
.expect("Did not enter a correct path?");
if !storage.trim().is_empty() {
global.storage = storage
.trim()
.trim_matches(|c| c == '"' || c == '\'')
.to_string();
}
}
if let Some(pl) = args.playlists {
global.playlists = pl
} else {
print!("Playlist path [{}]: ", global.playlists);
stdout().flush().unwrap();
stdin()
.read_line(&mut playlist)
.expect("Did not enter a correct path?");
if !playlist.trim().is_empty() {
global.playlists = playlist
.trim()
.trim_matches(|c| c == '"' || c == '\'')
.to_string();
}
}
if let Some(lp) = args.logs {
global.logs = lp;
} else {
print!("Logging path [{}]: ", global.logs);
stdout().flush().unwrap();
stdin()
.read_line(&mut logging)
.expect("Did not enter a correct path?");
if !logging.trim().is_empty() {
global.logs = logging
.trim()
.trim_matches(|c| c == '"' || c == '\'')
.to_string();
}
}
if let Some(p) = args.public {
global.public = p;
} else {
print!("Public (HLS) path [{}]: ", global.public);
stdout().flush().unwrap();
stdin()
.read_line(&mut public)
.expect("Did not enter a correct path?");
if !public.trim().is_empty() {
global.public = public
.trim()
.trim_matches(|c| c == '"' || c == '\'')
.to_string();
}
}
if let Some(smtp) = args.mail_smtp {
global.mail_smtp = smtp;
} else {
print!("SMTP server [{}]: ", global.mail_smtp);
stdout().flush().unwrap();
stdin()
.read_line(&mut mail_smtp)
.expect("Did not enter a correct SMTP server?");
if !mail_smtp.trim().is_empty() {
global.mail_smtp = mail_smtp
.trim()
.trim_matches(|c| c == '"' || c == '\'')
.to_string();
}
}
if let Some(user) = args.mail_user {
global.mail_user = user;
} else {
print!("SMTP user [{}]: ", global.mail_user);
stdout().flush().unwrap();
stdin()
.read_line(&mut mail_user)
.expect("Did not enter a correct SMTP user?");
if !mail_user.trim().is_empty() {
global.mail_user = mail_user
.trim()
.trim_matches(|c| c == '"' || c == '\'')
.to_string();
}
}
if let Some(pass) = args.mail_password {
global.mail_password = pass;
} else {
print!(
"SMTP password [{}]: ",
if global.mail_password.is_empty() {
""
} else {
"********"
}
);
stdout().flush().unwrap();
let password = read_password().unwrap_or_default();
if !password.trim().is_empty() {
global.mail_password = password.trim().to_string();
}
}
if args.mail_starttls {
global.mail_starttls = true;
} else {
print!(
"SMTP use TLS [{}]: ",
if global.mail_starttls { "yes" } else { "no" }
);
stdout().flush().unwrap();
stdin()
.read_line(&mut mail_starttls)
.expect("Did not enter a yes or no?");
if !mail_starttls.trim().is_empty() {
global.mail_starttls = mail_starttls.trim().to_lowercase().starts_with('y');
}
}
if let Err(e) = handles::update_global(pool, global.clone()).await {
eprintln!("{e}");
error_code = 1;
};
let mut channel = handles::select_channel(pool, &1).await.unwrap();
channel.public = global.public;
channel.playlists = global.playlists;
channel.storage = global.storage;
let mut storage_path = PathBuf::from(channel.storage.clone());
if global.shared {
storage_path = storage_path.join("1");
channel.public = Path::new(&channel.public)
.join("1")
.to_string_lossy()
.to_string();
channel.playlists = Path::new(&channel.playlists)
.join("1")
.to_string_lossy()
.to_string();
channel.storage = storage_path.to_string_lossy().to_string();
};
if let Err(e) = copy_assets(&storage_path).await {
eprintln!("{e}");
};
handles::update_channel(pool, 1, channel).await.unwrap();
#[cfg(target_family = "unix")]
{
update_permissions().await;
}
println!("\nSet global settings done...");
} else if args.user_set {
global_user(&mut args);
}
if let Some(username) = args.username {
error_code = 0;
let chl: Vec<i32> = channels.clone().iter().map(|c| c.id).collect();
let ff_user = User {
id: 0,
mail: Some(args.mail.unwrap()),
username: username.clone(),
password: args.password.unwrap(),
role_id: Some(1),
channel_ids: Some(chl.clone()),
token: None,
};
if let Err(e) = handles::insert_or_update_user(pool, ff_user).await {
eprintln!("{e}");
error_code = 1;
};
println!("Create/update global admin user \"{username}\" done...");
}
if ARGS.list_channels {
let chl = channels
.iter()
.map(|c| (c.id, c.name.clone()))
.collect::<Vec<(i32, String)>>();
println!(
"Available channels:\n{}",
chl.iter()
.map(|(i, t)| format!(" {i}: '{t}'"))
.collect::<Vec<String>>()
.join("\n")
);
error_code = 0;
}
if ARGS.dump_advanced {
if let Some(channels) = &ARGS.channels {
for id in channels {
match AdvancedConfig::dump(pool, *id).await {
Ok(_) => {
println!("Dump config to: advanced_{id}.toml");
error_code = 0;
}
Err(e) => {
eprintln!("Dump config: {e}");
error_code = 1;
}
};
}
} else {
eprintln!("Channel ID(s) needed! Use `--channels 1 ...`");
error_code = 1;
}
}
if ARGS.dump_config {
if let Some(channels) = &ARGS.channels {
for id in channels {
match PlayoutConfig::dump(pool, *id).await {
Ok(_) => {
println!("Dump config to: ffplayout_{id}.toml");
error_code = 0;
}
Err(e) => {
eprintln!("Dump config: {e}");
error_code = 1;
}
};
}
} else {
eprintln!("Channel ID(s) needed! Use `--channels 1 ...`");
error_code = 1;
}
}
if let Some(path) = &ARGS.import_advanced {
if let Some(channels) = &ARGS.channels {
for id in channels {
match AdvancedConfig::import(pool, *id, path).await {
Ok(_) => {
println!("Import config done...");
error_code = 0;
}
Err(e) => {
eprintln!("{e}");
error_code = 1;
}
};
}
} else {
eprintln!("Channel ID(s) needed! Use `--channels 1 ...`");
error_code = 1;
}
}
if let Some(path) = &ARGS.import_config {
if let Some(channels) = &ARGS.channels {
for id in channels {
match PlayoutConfig::import(pool, *id, path).await {
Ok(_) => {
println!("Import config done...");
error_code = 0;
}
Err(e) => {
eprintln!("{e}");
error_code = 1;
}
};
}
} else {
eprintln!("Channel ID(s) needed! Use `--channels 1 ...`");
error_code = 1;
}
}
if error_code > -1 {
Err(error_code)
} else {
Ok(())
}
}
#[cfg(target_family = "unix")]
async fn update_permissions() {
let db_path = Path::new(db_path().unwrap());
let uid = nix::unistd::Uid::current();
let parent_owner = db_path.parent().unwrap().metadata().unwrap().uid();
let user = nix::unistd::User::from_uid(parent_owner.into())
.unwrap_or_default()
.unwrap();
if uid.is_root() && uid.to_string() != parent_owner.to_string() {
println!("Adjust DB permission...");
let db = fs::canonicalize(db_path).await.unwrap();
let shm = fs::canonicalize(db_path.with_extension("db-shm"))
.await
.unwrap();
let wal = fs::canonicalize(db_path.with_extension("db-wal"))
.await
.unwrap();
nix::unistd::chown(&db, Some(user.uid), Some(user.gid)).expect("Change DB owner");
if shm.is_file() {
nix::unistd::chown(&shm, Some(user.uid), Some(user.gid)).expect("Change DB-SHM owner");
}
if wal.is_file() {
nix::unistd::chown(&wal, Some(user.uid), Some(user.gid)).expect("Change DB-WAL owner");
}
}
}

View File

@ -1,90 +0,0 @@
use std::{
io,
path::PathBuf,
sync::{Arc, Mutex},
};
use log::*;
use sqlx::{Pool, Sqlite};
use super::logging::MailQueue;
use crate::db::{handles, models::Channel};
use crate::player::controller::{ChannelController, ChannelManager};
use crate::utils::{config::get_config, copy_assets, errors::ServiceError};
async fn map_global_admins(conn: &Pool<Sqlite>) -> Result<(), ServiceError> {
let channels = handles::select_related_channels(conn, None).await?;
let admins = handles::select_global_admins(conn).await?;
for admin in admins {
if let Err(e) =
handles::insert_user_channel(conn, admin.id, channels.iter().map(|c| c.id).collect())
.await
{
error!("Update global admin: {e}");
};
}
Ok(())
}
pub async fn create_channel(
conn: &Pool<Sqlite>,
controllers: Arc<Mutex<ChannelController>>,
queue: Arc<Mutex<Vec<Arc<Mutex<MailQueue>>>>>,
target_channel: Channel,
) -> Result<Channel, ServiceError> {
let channel = handles::insert_channel(conn, target_channel).await?;
handles::new_channel_presets(conn, channel.id).await?;
handles::update_channel(conn, channel.id, channel.clone()).await?;
let output_param = "-c:v libx264 -crf 23 -x264-params keyint=50:min-keyint=25:scenecut=-1 -maxrate 1300k -bufsize 2600k -preset faster -tune zerolatency -profile:v Main -level 3.1 -c:a aac -ar 44100 -b:a 128k -flags +cgop -f hls -hls_time 6 -hls_list_size 600 -hls_flags append_list+delete_segments+omit_endlist -hls_segment_filename live/stream-%d.ts live/stream.m3u8".to_string();
handles::insert_advanced_configuration(conn, channel.id).await?;
handles::insert_configuration(conn, channel.id, output_param).await?;
let config = get_config(conn, channel.id).await?;
let m_queue = Arc::new(Mutex::new(MailQueue::new(channel.id, config.mail.clone())));
let manager = ChannelManager::new(Some(conn.clone()), channel.clone(), config.clone());
if let Err(e) = copy_assets(&PathBuf::from(&config.storage.path)).await {
error!("{e}");
};
controllers
.lock()
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?
.add(manager);
if let Ok(mut mqs) = queue.lock() {
mqs.push(m_queue.clone());
}
map_global_admins(conn).await?;
Ok(channel)
}
pub async fn delete_channel(
conn: &Pool<Sqlite>,
id: i32,
controllers: Arc<Mutex<ChannelController>>,
queue: Arc<Mutex<Vec<Arc<Mutex<MailQueue>>>>>,
) -> Result<(), ServiceError> {
let channel = handles::select_channel(conn, &id).await?;
handles::delete_channel(conn, &channel.id).await?;
controllers
.lock()
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?
.remove(id);
if let Ok(mut mqs) = queue.lock() {
mqs.retain(|q| q.lock().unwrap().id != id);
}
map_global_admins(conn).await?;
Ok(())
}

View File

@ -1,972 +0,0 @@
use std::{
fmt,
path::{Path, PathBuf},
str::FromStr,
};
use chrono::NaiveTime;
use flexi_logger::Level;
use regex::Regex;
use serde::{Deserialize, Serialize};
use shlex::split;
use sqlx::{Pool, Sqlite};
use tokio::{fs, io::AsyncReadExt};
use ts_rs::TS;
use crate::db::{handles, models};
use crate::utils::{files::norm_abs_path, gen_tcp_socket, time_to_sec};
use crate::vec_strings;
use crate::AdvancedConfig;
use crate::ARGS;
use super::errors::ServiceError;
pub const DUMMY_LEN: f64 = 60.0;
pub const IMAGE_FORMAT: [&str; 21] = [
"bmp", "dds", "dpx", "exr", "gif", "hdr", "j2k", "jpg", "jpeg", "pcx", "pfm", "pgm", "phm",
"png", "psd", "ppm", "sgi", "svg", "tga", "tif", "webp",
];
// Some well-known errors can be safely ignored
pub const FFMPEG_IGNORE_ERRORS: [&str; 13] = [
"ac-tex damaged",
"codec s302m, is muxed as a private data stream",
"corrupt decoded frame in stream",
"corrupt input packet in stream",
"end mismatch left",
"Invalid mb type in I-frame at",
"Packet corrupt",
"Referenced QT chapter track not found",
"skipped MB in I-frame at",
"Thread message queue blocking",
"timestamp discontinuity",
"Warning MVs not available",
"frame size not set",
];
pub const FFMPEG_UNRECOVERABLE_ERRORS: [&str; 6] = [
"Address already in use",
"Invalid argument",
"Numerical result",
"Error initializing complex filters",
"Error while decoding stream #0:0: Invalid data found when processing input",
"Unrecognized option",
];
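Both lists are used as plain substring filters on ffmpeg's stderr; a minimal sketch of that check:

```rust
fn is_ignored(line: &str) -> bool {
    // The same containment test the log readers apply to each stderr line.
    FFMPEG_IGNORE_ERRORS.iter().any(|i| line.contains(*i))
}

fn main() {
    assert!(is_ignored("[mpeg2video @ 0x55e] ac-tex damaged at 4 14"));
    assert!(!is_ignored("[error] unexpected decoder failure"));
}
```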
#[derive(Debug, Clone, Eq, PartialEq, Deserialize, Serialize, TS)]
#[ts(export, export_to = "playout_config.d.ts")]
#[serde(rename_all = "lowercase")]
pub enum OutputMode {
Desktop,
HLS,
Null,
Stream,
}
impl OutputMode {
fn new(s: &str) -> Self {
match s {
"desktop" => Self::Desktop,
"null" => Self::Null,
"stream" => Self::Stream,
_ => Self::HLS,
}
}
}
impl Default for OutputMode {
fn default() -> Self {
Self::HLS
}
}
impl FromStr for OutputMode {
type Err = String;
fn from_str(input: &str) -> Result<Self, Self::Err> {
match input {
"desktop" => Ok(Self::Desktop),
"hls" => Ok(Self::HLS),
"null" => Ok(Self::Null),
"stream" => Ok(Self::Stream),
_ => Err("Use 'desktop', 'hls', 'null' or 'stream'".to_string()),
}
}
}
impl fmt::Display for OutputMode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
OutputMode::Desktop => write!(f, "desktop"),
OutputMode::HLS => write!(f, "hls"),
OutputMode::Null => write!(f, "null"),
OutputMode::Stream => write!(f, "stream"),
}
}
}
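Round-tripping the enum through `FromStr` and `Display`, as a small illustration:

```rust
use std::str::FromStr;

fn main() {
    let mode = OutputMode::from_str("stream").unwrap();
    assert_eq!(mode, OutputMode::Stream);
    assert_eq!(mode.to_string(), "stream");

    // Unknown values are rejected with a usage hint.
    assert!(OutputMode::from_str("dvd").is_err());
}
```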
#[derive(Debug, Default, Clone, Serialize, Deserialize, Eq, PartialEq, TS)]
#[ts(export, export_to = "playout_config.d.ts")]
#[serde(rename_all = "lowercase")]
pub enum ProcessMode {
Folder,
#[default]
Playlist,
}
impl ProcessMode {
fn new(s: &str) -> Self {
match s {
"folder" => Self::Folder,
_ => Self::Playlist,
}
}
}
impl fmt::Display for ProcessMode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ProcessMode::Folder => write!(f, "folder"),
ProcessMode::Playlist => write!(f, "playlist"),
}
}
}
impl FromStr for ProcessMode {
type Err = String;
fn from_str(input: &str) -> Result<Self, Self::Err> {
match input {
"folder" => Ok(Self::Folder),
"playlist" => Ok(Self::Playlist),
_ => Err("Use 'folder' or 'playlist'".to_string()),
}
}
}
#[derive(Clone, Debug, Default, Deserialize, Serialize, TS)]
pub struct Template {
pub sources: Vec<Source>,
}
#[derive(Clone, Debug, Default, Deserialize, Serialize, TS)]
pub struct Source {
#[ts(type = "string")]
pub start: NaiveTime,
#[ts(type = "string")]
pub duration: NaiveTime,
pub shuffle: bool,
pub paths: Vec<PathBuf>,
}
/// Channel Config
///
/// We initialize this once when ffplayout starts, and use it globally throughout the whole program.
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
#[ts(export, export_to = "playout_config.d.ts")]
pub struct PlayoutConfig {
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub channel: Channel,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub advanced: AdvancedConfig,
pub general: General,
pub mail: Mail,
pub logging: Logging,
pub processing: Processing,
pub ingest: Ingest,
pub playlist: Playlist,
pub storage: Storage,
pub text: Text,
pub task: Task,
#[serde(alias = "out")]
pub output: Output,
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
pub struct Channel {
pub logs: PathBuf,
pub public: PathBuf,
pub playlists: PathBuf,
pub storage: PathBuf,
pub shared: bool,
}
impl Channel {
pub fn new(config: &models::GlobalSettings, channel: models::Channel) -> Self {
Self {
logs: PathBuf::from(config.logs.clone()),
public: PathBuf::from(channel.public.clone()),
playlists: PathBuf::from(channel.playlists.clone()),
storage: PathBuf::from(channel.storage.clone()),
shared: config.shared,
}
}
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
#[ts(export, export_to = "playout_config.d.ts")]
pub struct General {
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub id: i32,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub channel_id: i32,
pub stop_threshold: f64,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub generate: Option<Vec<String>>,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub ffmpeg_filters: Vec<String>,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub ffmpeg_libs: Vec<String>,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub template: Option<Template>,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub skip_validation: bool,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub validate: bool,
}
impl General {
fn new(config: &models::Configuration) -> Self {
Self {
id: config.id,
channel_id: config.channel_id,
stop_threshold: config.general_stop_threshold,
generate: None,
ffmpeg_filters: vec![],
ffmpeg_libs: vec![],
template: None,
skip_validation: false,
validate: false,
}
}
}
#[derive(Debug, Clone, Deserialize, Serialize, TS)]
#[ts(export, export_to = "playout_config.d.ts")]
pub struct Mail {
#[serde(skip_deserializing)]
pub show: bool,
pub subject: String,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub smtp_server: String,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub starttls: bool,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub sender_addr: String,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub sender_pass: String,
pub recipient: String,
#[ts(type = "string")]
pub mail_level: Level,
pub interval: i64,
}
impl Mail {
fn new(global: &models::GlobalSettings, config: &models::Configuration) -> Self {
Self {
show: !global.mail_password.is_empty() && global.mail_smtp != "mail.example.org",
subject: config.mail_subject.clone(),
smtp_server: global.mail_smtp.clone(),
starttls: global.mail_starttls,
sender_addr: global.mail_user.clone(),
sender_pass: global.mail_password.clone(),
recipient: config.mail_recipient.clone(),
mail_level: string_to_log_level(config.mail_level.clone()),
interval: config.mail_interval,
}
}
}
impl Default for Mail {
fn default() -> Self {
Mail {
show: false,
subject: String::default(),
smtp_server: String::default(),
starttls: bool::default(),
sender_addr: String::default(),
sender_pass: String::default(),
recipient: String::default(),
mail_level: Level::Debug,
interval: i64::default(),
}
}
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
#[ts(export, export_to = "playout_config.d.ts")]
pub struct Logging {
pub ffmpeg_level: String,
pub ingest_level: String,
pub detect_silence: bool,
pub ignore_lines: Vec<String>,
}
impl Logging {
fn new(config: &models::Configuration) -> Self {
Self {
ffmpeg_level: config.logging_ffmpeg_level.clone(),
ingest_level: config.logging_ingest_level.clone(),
detect_silence: config.logging_detect_silence,
ignore_lines: config
.logging_ignore
.split(';')
.map(|s| s.to_string())
.collect(),
}
}
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
#[ts(export, export_to = "playout_config.d.ts")]
pub struct Processing {
pub mode: ProcessMode,
pub audio_only: bool,
pub copy_audio: bool,
pub copy_video: bool,
pub width: i64,
pub height: i64,
pub aspect: f64,
pub fps: f64,
pub add_logo: bool,
pub logo: String,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub logo_path: String,
pub logo_scale: String,
pub logo_opacity: f64,
pub logo_position: String,
pub audio_tracks: i32,
#[serde(default = "default_track_index")]
pub audio_track_index: i32,
pub audio_channels: u8,
pub volume: f64,
pub custom_filter: String,
#[serde(default)]
pub vtt_enable: bool,
#[serde(default)]
pub vtt_dummy: Option<String>,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub cmd: Option<Vec<String>>,
}
impl Processing {
fn new(config: &models::Configuration) -> Self {
Self {
mode: ProcessMode::new(&config.processing_mode.clone()),
audio_only: config.processing_audio_only,
audio_track_index: config.processing_audio_track_index,
copy_audio: config.processing_copy_audio,
copy_video: config.processing_copy_video,
width: config.processing_width,
height: config.processing_height,
aspect: config.processing_aspect,
fps: config.processing_fps,
add_logo: config.processing_add_logo,
logo: config.processing_logo.clone(),
logo_path: config.processing_logo.clone(),
logo_scale: config.processing_logo_scale.clone(),
logo_opacity: config.processing_logo_opacity,
logo_position: config.processing_logo_position.clone(),
audio_tracks: config.processing_audio_tracks,
audio_channels: config.processing_audio_channels,
volume: config.processing_volume,
custom_filter: config.processing_filter.clone(),
vtt_enable: config.processing_vtt_enable,
vtt_dummy: config.processing_vtt_dummy.clone(),
cmd: None,
}
}
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
#[ts(export, export_to = "playout_config.d.ts")]
pub struct Ingest {
pub enable: bool,
pub input_param: String,
pub custom_filter: String,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub input_cmd: Option<Vec<String>>,
}
impl Ingest {
fn new(config: &models::Configuration) -> Self {
Self {
enable: config.ingest_enable,
input_param: config.ingest_param.clone(),
custom_filter: config.ingest_filter.clone(),
input_cmd: None,
}
}
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
#[ts(export, export_to = "playout_config.d.ts")]
pub struct Playlist {
pub day_start: String,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub start_sec: Option<f64>,
pub length: String,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub length_sec: Option<f64>,
pub infinit: bool,
}
impl Playlist {
fn new(config: &models::Configuration) -> Self {
Self {
day_start: config.playlist_day_start.clone(),
start_sec: None,
length: config.playlist_length.clone(),
length_sec: None,
infinit: config.playlist_infinit,
}
}
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
#[ts(export, export_to = "playout_config.d.ts")]
pub struct Storage {
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub path: PathBuf,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub paths: Vec<PathBuf>,
pub filler: String,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub filler_path: PathBuf,
pub extensions: Vec<String>,
pub shuffle: bool,
#[serde(skip_deserializing)]
pub shared_storage: bool,
}
impl Storage {
fn new(config: &models::Configuration, path: PathBuf, shared_storage: bool) -> Self {
Self {
path,
paths: vec![],
filler: config.storage_filler.clone(),
filler_path: PathBuf::from(config.storage_filler.clone()),
extensions: config
.storage_extensions
.split(';')
.map(|s| s.to_string())
.collect(),
shuffle: config.storage_shuffle,
shared_storage,
}
}
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
#[ts(export, export_to = "playout_config.d.ts")]
pub struct Text {
pub add_text: bool,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub node_pos: Option<usize>,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub zmq_stream_socket: Option<String>,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub zmq_server_socket: Option<String>,
#[serde(alias = "fontfile")]
pub font: String,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub font_path: String,
pub text_from_filename: bool,
pub style: String,
pub regex: String,
}
impl Text {
fn new(config: &models::Configuration) -> Self {
Self {
add_text: config.text_add,
node_pos: None,
zmq_stream_socket: None,
zmq_server_socket: None,
font: config.text_font.clone(),
font_path: config.text_font.clone(),
text_from_filename: config.text_from_filename,
style: config.text_style.clone(),
regex: config.text_regex.clone(),
}
}
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
#[ts(export, export_to = "playout_config.d.ts")]
pub struct Task {
pub enable: bool,
pub path: PathBuf,
}
impl Task {
fn new(config: &models::Configuration) -> Self {
Self {
enable: config.task_enable,
path: PathBuf::from(config.task_path.clone()),
}
}
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
#[ts(export, export_to = "playout_config.d.ts")]
pub struct Output {
pub mode: OutputMode,
pub output_param: String,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub output_count: usize,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub output_filter: Option<String>,
#[ts(skip)]
#[serde(skip_serializing, skip_deserializing)]
pub output_cmd: Option<Vec<String>>,
}
impl Output {
fn new(config: &models::Configuration) -> Self {
Self {
mode: OutputMode::new(&config.output_mode),
output_param: config.output_param.clone(),
output_count: 0,
output_filter: None,
output_cmd: None,
}
}
}
pub fn string_to_log_level(l: String) -> Level {
match l.to_lowercase().as_str() {
"error" => Level::Error,
"info" => Level::Info,
"trace" => Level::Trace,
"warning" => Level::Warn,
_ => Level::Debug,
}
}
pub fn string_to_processing_mode(l: String) -> ProcessMode {
match l.to_lowercase().as_str() {
"playlist" => ProcessMode::Playlist,
"folder" => ProcessMode::Folder,
_ => ProcessMode::Playlist,
}
}
pub fn string_to_output_mode(l: String) -> OutputMode {
match l.to_lowercase().as_str() {
"desktop" => OutputMode::Desktop,
"hls" => OutputMode::HLS,
"null" => OutputMode::Null,
"stream" => OutputMode::Stream,
_ => OutputMode::HLS,
}
}
fn default_track_index() -> i32 {
-1
}
// fn default_tracks() -> i32 {
// 1
// }
// fn default_channels() -> u8 {
// 2
// }
impl PlayoutConfig {
pub async fn new(pool: &Pool<Sqlite>, channel_id: i32) -> Result<Self, ServiceError> {
let global = handles::select_global(pool).await?;
let channel = handles::select_channel(pool, &channel_id).await?;
let config = handles::select_configuration(pool, channel_id).await?;
let adv_config = handles::select_advanced_configuration(pool, channel_id).await?;
let channel = Channel::new(&global, channel);
let advanced = AdvancedConfig::new(adv_config);
let general = General::new(&config);
let mail = Mail::new(&global, &config);
let logging = Logging::new(&config);
let mut processing = Processing::new(&config);
let mut ingest = Ingest::new(&config);
let mut playlist = Playlist::new(&config);
let mut text = Text::new(&config);
let task = Task::new(&config);
let mut output = Output::new(&config);
if !channel.storage.is_dir() {
tokio::fs::create_dir_all(&channel.storage)
.await
.unwrap_or_else(|_| panic!("Can't create storage folder: {:#?}", channel.storage));
}
let mut storage = Storage::new(&config, channel.storage.clone(), channel.shared);
if !channel.playlists.is_dir() {
tokio::fs::create_dir_all(&channel.playlists).await?;
}
if !channel.logs.is_dir() {
tokio::fs::create_dir_all(&channel.logs).await?;
}
let (filler_path, _, filler) = norm_abs_path(&channel.storage, &config.storage_filler)?;
storage.filler = filler;
storage.filler_path = filler_path;
playlist.start_sec = Some(time_to_sec(&playlist.day_start));
if playlist.length.contains(':') {
playlist.length_sec = Some(time_to_sec(&playlist.length));
} else {
playlist.length_sec = Some(86400.0);
}
let (logo_path, _, logo) = norm_abs_path(&channel.storage, &processing.logo)?;
if processing.add_logo && !logo_path.is_file() {
processing.add_logo = false;
}
processing.logo = logo;
processing.logo_path = logo_path.to_string_lossy().to_string();
if processing.audio_tracks < 1 {
processing.audio_tracks = 1
}
let mut process_cmd = vec_strings![];
if processing.audio_only {
process_cmd.append(&mut vec_strings!["-vn"]);
} else if processing.copy_video {
process_cmd.append(&mut vec_strings!["-c:v", "copy"]);
} else if let Some(decoder_cmd) = &advanced.decoder.output_cmd {
process_cmd.append(&mut decoder_cmd.clone());
} else {
let bitrate = format!("{}k", processing.width * processing.height / 16);
let buff_size = format!("{}k", (processing.width * processing.height / 16) / 2);
process_cmd.append(&mut vec_strings![
"-pix_fmt",
"yuv420p",
"-r",
&processing.fps,
"-c:v",
"mpeg2video",
"-g",
"1",
"-b:v",
&bitrate,
"-minrate",
&bitrate,
"-maxrate",
&bitrate,
"-bufsize",
&buff_size,
"-mpegts_flags",
"initial_discontinuity"
]);
}
if processing.copy_audio {
process_cmd.append(&mut vec_strings!["-c:a", "copy"]);
} else if advanced.decoder.output_cmd.is_none() {
process_cmd.append(&mut pre_audio_codec(
&processing.custom_filter,
&ingest.custom_filter,
processing.audio_channels,
));
}
process_cmd.append(&mut vec_strings!["-f", "mpegts", "-"]);
processing.cmd = Some(process_cmd);
ingest.input_cmd = split(ingest.input_param.as_str());
output.output_count = 1;
output.output_filter = None;
if output.mode == OutputMode::Null {
output.output_cmd = Some(vec_strings!["-f", "null", "-"]);
} else if let Some(mut cmd) = split(output.output_param.as_str()) {
// get output count according to the var_stream_map value, or by counting output parameters
if let Some(i) = cmd.clone().iter().position(|m| m == "-var_stream_map") {
output.output_count = cmd[i + 1].split_whitespace().count();
} else {
output.output_count = cmd
.iter()
.enumerate()
.filter(|(i, p)| i > &0 && !p.starts_with('-') && !cmd[i - 1].starts_with('-'))
.count();
}
if let Some(i) = cmd.clone().iter().position(|r| r == "-filter_complex") {
output.output_filter = Some(cmd[i + 1].clone());
cmd.remove(i);
cmd.remove(i);
}
let is_tee_muxer = cmd.contains(&"tee".to_string());
for item in cmd.iter_mut() {
if item.ends_with(".ts") || (item.ends_with(".m3u8") && item != "master.m3u8") {
if is_tee_muxer {
// Processes the `item` string to replace `.ts` and `.m3u8` filenames with their absolute paths.
// Ensures that the corresponding directories exist.
//
// - Uses regular expressions to identify `.ts` and `.m3u8` filenames within the `item` string.
// - For each identified filename, normalizes its path and checks if the parent directory exists.
// - Creates the parent directory if it does not exist.
// - Replaces the original filename in the `item` string with the normalized absolute path.
let re_ts = Regex::new(r"filename=(\S+?\.ts)").unwrap();
let re_m3 = Regex::new(r"\](\S+?\.m3u8)").unwrap();
for s in item.clone().split('|') {
if let Some(ts) = re_ts.captures(s).and_then(|p| p.get(1)) {
let (segment_path, _, _) =
norm_abs_path(&channel.public, ts.as_str())?;
let parent = segment_path.parent().ok_or("HLS parent path")?;
if !parent.is_dir() {
fs::create_dir_all(parent).await?;
}
item.clone_from(
&item.replace(ts.as_str(), &segment_path.to_string_lossy()),
);
}
if let Some(m3) = re_m3.captures(s).and_then(|p| p.get(1)) {
let (m3u8_path, _, _) =
norm_abs_path(&channel.public, m3.as_str())?;
let parent = m3u8_path.parent().ok_or("HLS parent path")?;
if !parent.is_dir() {
fs::create_dir_all(parent).await?;
}
item.clone_from(
&item.replace(m3.as_str(), &m3u8_path.to_string_lossy()),
);
}
}
} else if let Ok((public, _, _)) = norm_abs_path(&channel.public, item) {
let parent = public.parent().ok_or("HLS parent path")?;
if !parent.is_dir() {
fs::create_dir_all(parent).await?;
}
item.clone_from(&public.to_string_lossy().to_string());
};
}
}
output.output_cmd = Some(cmd);
}
// When text overlay is on and text_from_filename is off, also turn on the zmq RPC server,
// so it can receive text messages
if text.add_text && !text.text_from_filename {
text.zmq_stream_socket = gen_tcp_socket(String::new());
text.zmq_server_socket =
gen_tcp_socket(text.zmq_stream_socket.clone().unwrap_or_default());
text.node_pos = Some(2);
} else {
text.zmq_stream_socket = None;
text.zmq_server_socket = None;
text.node_pos = None;
}
let (font_path, _, font) = norm_abs_path(&channel.storage, &text.font)?;
text.font = font;
text.font_path = font_path.to_string_lossy().to_string();
Ok(Self {
channel,
advanced,
general,
mail,
logging,
processing,
ingest,
playlist,
storage,
text,
task,
output,
})
}
pub async fn dump(pool: &Pool<Sqlite>, id: i32) -> Result<(), ServiceError> {
let config = Self::new(pool, id).await?;
let toml_string = toml_edit::ser::to_string_pretty(&config)?;
tokio::fs::write(&format!("ffplayout_{id}.toml"), toml_string).await?;
Ok(())
}
pub async fn import(pool: &Pool<Sqlite>, id: i32, path: &Path) -> Result<(), ServiceError> {
if path.is_file() {
let mut file = tokio::fs::File::open(path).await?;
let mut contents = String::new();
file.read_to_string(&mut contents).await?;
let config: PlayoutConfig = toml_edit::de::from_str(&contents).unwrap();
handles::update_configuration(pool, id, config).await?;
} else {
return Err(ServiceError::BadRequest("Path does not exist!".to_string()));
}
Ok(())
}
}
// impl Default for PlayoutConfig {
// fn default() -> Self {
// Self::new(1)
// }
// }
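The mpeg2video defaults in `PlayoutConfig::new` derive the bitrate from the frame size; a worked check of that arithmetic:

```rust
fn main() {
    // Same heuristic as above: kbit/s from width * height / 16,
    // with a buffer of half that size.
    let (width, height) = (1920_i64, 1080_i64);
    let bitrate = format!("{}k", width * height / 16);
    let buff_size = format!("{}k", (width * height / 16) / 2);

    assert_eq!(bitrate, "129600k");
    assert_eq!(buff_size, "64800k");
}
```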
/// When custom_filter contains a loudnorm filter, use a different audio encoder;
/// s302m has higher quality, but it is experimental
/// and does not work well together with the loudnorm filter.
fn pre_audio_codec(proc_filter: &str, ingest_filter: &str, channel_count: u8) -> Vec<String> {
let mut codec = vec_strings![
"-c:a",
"s302m",
"-strict",
"-2",
"-sample_fmt",
"s16",
"-ar",
"48000",
"-ac",
channel_count
];
if proc_filter.contains("loudnorm") || ingest_filter.contains("loudnorm") {
codec = vec_strings![
"-c:a",
"mp2",
"-b:a",
"384k",
"-ar",
"48000",
"-ac",
channel_count
];
}
codec
}
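A short illustration of the codec switch the doc comment describes:

```rust
fn main() {
    // Default path: the experimental s302m encoder.
    let default = pre_audio_codec("", "", 2);
    assert!(default.contains(&"s302m".to_string()));

    // loudnorm in either filter chain switches to mp2.
    let with_loudnorm = pre_audio_codec("loudnorm=I=-18", "", 2);
    assert!(with_loudnorm.contains(&"mp2".to_string()));
}
```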
/// Read command line arguments, and override the config with them.
pub async fn get_config(
pool: &Pool<Sqlite>,
channel_id: i32,
) -> Result<PlayoutConfig, ServiceError> {
let mut config = PlayoutConfig::new(pool, channel_id).await?;
let args = ARGS.clone();
config.general.generate = args.generate;
config.general.validate = args.validate;
config.general.skip_validation = args.skip_validation;
if let Some(template_file) = args.template {
let mut f = fs::File::options()
.read(true)
.write(false)
.open(template_file)
.await?;
let mut buffer = Vec::new();
f.read_to_end(&mut buffer).await?;
let mut template: Template = serde_json::from_slice(&buffer)?;
template.sources.sort_by(|d1, d2| d1.start.cmp(&d2.start));
config.general.template = Some(template);
}
if let Some(paths) = args.paths {
config.storage.paths = paths;
}
if let Some(playlist) = args.playlists {
config.channel.playlists = PathBuf::from(&playlist);
}
if let Some(folder) = args.folder {
config.channel.storage = folder;
config.processing.mode = ProcessMode::Folder;
}
if let Some(start) = args.start {
config.playlist.day_start.clone_from(&start);
config.playlist.start_sec = Some(time_to_sec(&start));
}
if let Some(output) = args.output {
config.output.mode = output;
if config.output.mode == OutputMode::Null {
config.output.output_count = 1;
config.output.output_filter = None;
config.output.output_cmd = Some(vec_strings!["-f", "null", "-"]);
}
}
if let Some(volume) = args.volume {
config.processing.volume = volume;
}
if let Some(mail_smtp) = args.mail_smtp {
config.mail.smtp_server = mail_smtp;
}
if let Some(mail_user) = args.mail_user {
config.mail.sender_addr = mail_user;
}
if let Some(mail_password) = args.mail_password {
config.mail.sender_pass = mail_password;
}
if args.mail_starttls {
config.mail.starttls = true;
}
Ok(config)
}

View File

@ -1,232 +0,0 @@
use std::{error::Error, fmt, str::FromStr, sync::atomic::Ordering};
use log::*;
use serde::{Deserialize, Serialize};
use serde_json::{json, Map, Value};
use sqlx::{Pool, Sqlite};
use zeromq::{Socket, SocketRecv, SocketSend, ZmqMessage};
use crate::db::handles;
use crate::player::{
controller::{ChannelManager, ProcessUnit::*},
utils::{get_delta, get_media_map},
};
use crate::utils::{config::OutputMode::*, errors::ServiceError, logging::Target, TextFilter};
#[derive(Debug, Deserialize, Serialize, Clone)]
struct TextParams {
control: String,
message: TextFilter,
}
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ControlParams {
pub control: String,
}
#[derive(Debug, Deserialize, Serialize, Clone)]
struct MediaParams {
media: String,
}
#[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ProcessCtl {
Status,
Start,
Stop,
Restart,
}
impl FromStr for ProcessCtl {
type Err = String;
fn from_str(input: &str) -> Result<Self, Self::Err> {
match input.to_lowercase().as_str() {
"status" => Ok(Self::Status),
"start" => Ok(Self::Start),
"stop" => Ok(Self::Stop),
"restart" => Ok(Self::Restart),
_ => Err(format!("Command '{input}' not found!")),
}
}
}
impl fmt::Display for ProcessCtl {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Self::Status => write!(f, "status"),
Self::Start => write!(f, "start"),
Self::Stop => write!(f, "stop"),
Self::Restart => write!(f, "restart"),
}
}
}
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct Process {
pub command: ProcessCtl,
}
async fn zmq_send(msg: &str, socket_addr: &str) -> Result<String, Box<dyn Error>> {
let mut socket = zeromq::ReqSocket::new();
socket.connect(&format!("tcp://{socket_addr}")).await?;
socket.send(msg.into()).await?;
let repl: ZmqMessage = socket.recv().await?;
let response = String::from_utf8(repl.into_vec()[0].to_vec())?;
Ok(response)
}
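// Minimal usage sketch (hypothetical socket address; the real addresses come from config.text):
// let reply = zmq_send("drawtext@dyntext reinit text='Hello'", "127.0.0.1:5555").await?;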
pub async fn send_message(
manager: ChannelManager,
message: TextFilter,
) -> Result<Map<String, Value>, ServiceError> {
let filter = message.to_string();
let mut data_map = Map::new();
let config = manager.config.lock().unwrap().clone();
let id = config.general.channel_id;
if config.text.zmq_stream_socket.is_some() {
if let Some(clips_filter) = manager.filter_chain.clone() {
*clips_filter.lock().unwrap() = vec![filter.clone()];
}
if config.output.mode == HLS {
if manager.ingest_is_running.load(Ordering::SeqCst) {
let filter_server = format!("drawtext@dyntext reinit {filter}");
if let Ok(reply) = zmq_send(
&filter_server,
&config.text.zmq_server_socket.clone().unwrap(),
)
.await
{
data_map.insert("message".to_string(), json!(reply));
return Ok(data_map);
};
} else if let Err(e) = manager.stop(Ingest) {
error!(target: Target::file_mail(), channel = id; "Ingest {e:?}")
}
}
if config.output.mode != HLS || !manager.ingest_is_running.load(Ordering::SeqCst) {
let filter_stream = format!("drawtext@dyntext reinit {filter}");
if let Ok(reply) = zmq_send(
&filter_stream,
&config.text.zmq_stream_socket.clone().unwrap(),
)
.await
{
data_map.insert("message".to_string(), json!(reply));
return Ok(data_map);
};
}
}
Err(ServiceError::ServiceUnavailable(
"text message missing!".to_string(),
))
}
pub async fn control_state(
conn: &Pool<Sqlite>,
manager: &ChannelManager,
command: &str,
) -> Result<Map<String, Value>, ServiceError> {
let config = manager.config.lock().unwrap().clone();
let id = config.general.channel_id;
let current_date = manager.current_date.lock().unwrap().clone();
let current_list = manager.current_list.lock().unwrap().clone();
let mut date = manager.current_date.lock().unwrap().clone();
let index = manager.current_index.load(Ordering::SeqCst);
match command {
"back" => {
if index > 1 && current_list.len() > 1 {
let mut data_map = Map::new();
let mut media = current_list[index - 2].clone();
let (delta, _) = get_delta(&config, &media.begin.unwrap_or(0.0));
info!(target: Target::file_mail(), channel = id; "Move to last clip");
manager.current_index.fetch_sub(2, Ordering::SeqCst);
if let Err(e) = media.add_probe(false) {
error!(target: Target::file_mail(), channel = id; "{e:?}");
};
manager.channel.lock().unwrap().time_shift = delta;
date.clone_from(&current_date);
handles::update_stat(conn, config.general.channel_id, current_date, delta).await?;
if manager.stop(Decoder).is_err() {
return Err(ServiceError::InternalServerError);
};
data_map.insert("operation".to_string(), json!("move_to_last"));
data_map.insert("shifted_seconds".to_string(), json!(delta));
data_map.insert("media".to_string(), get_media_map(media));
return Ok(data_map);
}
}
"next" => {
if index < current_list.len() {
let mut data_map = Map::new();
let mut media = current_list[index].clone();
let (delta, _) = get_delta(&config, &media.begin.unwrap_or(0.0));
info!(target: Target::file_mail(), channel = id; "Move to next clip");
if let Err(e) = media.add_probe(false) {
error!(target: Target::file_mail(), channel = id; "{e:?}");
};
manager.channel.lock().unwrap().time_shift = delta;
date.clone_from(&current_date);
handles::update_stat(conn, config.general.channel_id, current_date, delta).await?;
if manager.stop(Decoder).is_err() {
return Err(ServiceError::InternalServerError);
};
data_map.insert("operation".to_string(), json!("move_to_next"));
data_map.insert("shifted_seconds".to_string(), json!(delta));
data_map.insert("media".to_string(), get_media_map(media));
return Ok(data_map);
}
}
"reset" => {
let mut data_map = Map::new();
info!(target: Target::file_mail(), channel = id; "Reset playout to original state");
manager.channel.lock().unwrap().time_shift = 0.0;
date.clone_from(&current_date);
manager.list_init.store(true, Ordering::SeqCst);
handles::update_stat(conn, config.general.channel_id, current_date, 0.0).await?;
if manager.stop(Decoder).is_err() {
return Err(ServiceError::InternalServerError);
};
data_map.insert("operation".to_string(), json!("reset_playout_state"));
return Ok(data_map);
}
_ => {
return Err(ServiceError::ServiceUnavailable(
"Command not found!".to_string(),
))
}
}
Ok(Map::new())
}

View File

@ -1,202 +0,0 @@
use std::io;
use actix_web::{error::ResponseError, Error, HttpResponse};
use derive_more::Display;
use ffprobe::FfProbeError;
#[derive(Debug, Display)]
pub enum ServiceError {
#[display("Internal Server Error")]
InternalServerError,
#[display("BadRequest: {_0}")]
BadRequest(String),
#[display("Conflict: {_0}")]
Conflict(String),
#[display("Forbidden: {_0}")]
Forbidden(String),
#[display("Unauthorized: {_0}")]
Unauthorized(String),
#[display("NoContent: {_0}")]
NoContent(String),
#[display("ServiceUnavailable: {_0}")]
ServiceUnavailable(String),
}
// Implementing the ResponseError trait allows us to convert our errors into HTTP responses with appropriate data
impl ResponseError for ServiceError {
fn error_response(&self) -> HttpResponse {
match self {
ServiceError::InternalServerError => {
HttpResponse::InternalServerError().json("Internal Server Error. Please try later.")
}
ServiceError::BadRequest(ref message) => HttpResponse::BadRequest().json(message),
ServiceError::Conflict(ref message) => HttpResponse::Conflict().json(message),
ServiceError::Forbidden(ref message) => HttpResponse::Forbidden().json(message),
ServiceError::Unauthorized(ref message) => HttpResponse::Unauthorized().json(message),
ServiceError::NoContent(ref message) => HttpResponse::NoContent().json(message),
ServiceError::ServiceUnavailable(ref message) => {
HttpResponse::ServiceUnavailable().json(message)
}
}
}
}
impl From<String> for ServiceError {
fn from(err: String) -> ServiceError {
ServiceError::BadRequest(err)
}
}
impl From<Error> for ServiceError {
fn from(err: Error) -> ServiceError {
ServiceError::BadRequest(err.to_string())
}
}
impl From<actix_multipart::MultipartError> for ServiceError {
fn from(err: actix_multipart::MultipartError) -> ServiceError {
ServiceError::BadRequest(err.to_string())
}
}
impl From<std::io::Error> for ServiceError {
fn from(err: std::io::Error) -> ServiceError {
ServiceError::NoContent(err.to_string())
}
}
impl From<std::num::ParseIntError> for ServiceError {
fn from(err: std::num::ParseIntError) -> ServiceError {
ServiceError::BadRequest(err.to_string())
}
}
impl From<jsonwebtoken::errors::Error> for ServiceError {
fn from(err: jsonwebtoken::errors::Error) -> ServiceError {
ServiceError::Unauthorized(err.to_string())
}
}
impl From<actix_web::error::BlockingError> for ServiceError {
fn from(err: actix_web::error::BlockingError) -> ServiceError {
ServiceError::BadRequest(err.to_string())
}
}
impl From<sqlx::Error> for ServiceError {
fn from(err: sqlx::Error) -> ServiceError {
ServiceError::BadRequest(err.to_string())
}
}
impl From<tokio::task::JoinError> for ServiceError {
fn from(err: tokio::task::JoinError) -> ServiceError {
ServiceError::BadRequest(err.to_string())
}
}
impl From<toml_edit::ser::Error> for ServiceError {
fn from(err: toml_edit::ser::Error) -> ServiceError {
ServiceError::BadRequest(err.to_string())
}
}
impl From<toml_edit::TomlError> for ServiceError {
fn from(err: toml_edit::TomlError) -> ServiceError {
ServiceError::BadRequest(err.to_string())
}
}
impl From<uuid::Error> for ServiceError {
fn from(err: uuid::Error) -> ServiceError {
ServiceError::BadRequest(err.to_string())
}
}
impl From<serde_json::Error> for ServiceError {
fn from(err: serde_json::Error) -> ServiceError {
ServiceError::BadRequest(err.to_string())
}
}
impl From<&str> for ServiceError {
fn from(err: &str) -> ServiceError {
ServiceError::BadRequest(err.to_string())
}
}
#[derive(Debug, Display)]
pub enum ProcessError {
#[display("Failed to spawn ffmpeg/ffprobe. {}", _0)]
CommandSpawn(io::Error),
#[display("{}", _0)]
Custom(String),
#[display("IO error: {}", _0)]
IO(io::Error),
#[display("{}", _0)]
Ffprobe(FfProbeError),
#[display("Regex compile error {}", _0)]
Regex(String),
#[display("Thread error {}", _0)]
Thread(String),
}
impl From<std::io::Error> for ProcessError {
fn from(err: std::io::Error) -> ProcessError {
ProcessError::IO(err)
}
}
impl From<FfProbeError> for ProcessError {
fn from(err: FfProbeError) -> Self {
Self::Ffprobe(err)
}
}
impl From<lettre::address::AddressError> for ProcessError {
fn from(err: lettre::address::AddressError) -> ProcessError {
ProcessError::Custom(err.to_string())
}
}
impl From<lettre::transport::smtp::Error> for ProcessError {
fn from(err: lettre::transport::smtp::Error) -> ProcessError {
ProcessError::Custom(err.to_string())
}
}
impl From<lettre::error::Error> for ProcessError {
fn from(err: lettre::error::Error) -> ProcessError {
ProcessError::Custom(err.to_string())
}
}
impl<T> From<std::sync::PoisonError<T>> for ProcessError {
fn from(err: std::sync::PoisonError<T>) -> ProcessError {
ProcessError::Custom(err.to_string())
}
}
impl From<regex::Error> for ProcessError {
fn from(err: regex::Error) -> Self {
Self::Regex(err.to_string())
}
}
impl From<serde_json::Error> for ProcessError {
fn from(err: serde_json::Error) -> Self {
Self::Custom(err.to_string())
}
}
impl From<Box<dyn std::any::Any + std::marker::Send>> for ProcessError {
fn from(err: Box<dyn std::any::Any + std::marker::Send>) -> Self {
Self::Thread(format!("{err:?}"))
}
}

View File

@ -1,437 +0,0 @@
use std::{
io::Write,
path::{Path, PathBuf},
};
use actix_multipart::Multipart;
use actix_web::{web, HttpResponse};
use futures_util::TryStreamExt as _;
use lexical_sort::{natural_lexical_cmp, PathSort};
use rand::{distributions::Alphanumeric, Rng};
use relative_path::RelativePath;
use serde::{Deserialize, Serialize};
use tokio::fs;
use log::*;
use crate::db::models::Channel;
use crate::player::utils::{file_extension, MediaProbe};
use crate::utils::{config::PlayoutConfig, errors::ServiceError};
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct PathObject {
pub source: String,
parent: Option<String>,
parent_folders: Option<Vec<String>>,
folders: Option<Vec<String>>,
files: Option<Vec<VideoFile>>,
#[serde(default)]
pub folders_only: bool,
#[serde(default)]
pub recursive: bool,
}
impl PathObject {
fn new(source: String, parent: Option<String>) -> Self {
Self {
source,
parent,
parent_folders: Some(vec![]),
folders: Some(vec![]),
files: Some(vec![]),
folders_only: false,
recursive: false,
}
}
}
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct MoveObject {
source: String,
target: String,
}
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct VideoFile {
name: String,
duration: f64,
}
/// Normalize absolute path
///
/// This function ensures that it is not possible to break out of root_path.
pub fn norm_abs_path(
root_path: &Path,
input_path: &str,
) -> Result<(PathBuf, String, String), ServiceError> {
let path_relative = RelativePath::new(&root_path.to_string_lossy())
.normalize()
.to_string()
.replace("../", "");
let path_suffix = root_path
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string();
let mut source_relative = RelativePath::new(input_path)
.normalize()
.to_string()
.replace("../", "");
if input_path.starts_with(&*root_path.to_string_lossy())
|| source_relative.starts_with(&path_relative)
{
source_relative = source_relative
.strip_prefix(&path_relative)
.and_then(|s| s.strip_prefix('/'))
.unwrap_or_default()
.to_string();
} else {
source_relative = source_relative
.strip_prefix(&path_suffix)
.and_then(|s| s.strip_prefix('/'))
.unwrap_or(&source_relative)
.to_string();
}
let path = &root_path.join(&source_relative);
Ok((path.to_path_buf(), path_suffix, source_relative))
}
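// Hypothetical examples of the normalization, assuming a storage root of "/storage":
// norm_abs_path(Path::new("/storage"), "clips/a.mp4") -> "/storage/clips/a.mp4"
// norm_abs_path(Path::new("/storage"), "../etc/passwd") -> "/storage/etc/passwd" (breakout stripped)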
/// File Browser
///
/// Take an input path and return the file and folder list from it.
/// Input should be a relative path segment, but when it is an absolute path, the norm_abs_path function
/// ensures that the user cannot break out of the storage path given in the config.
pub async fn browser(
config: &PlayoutConfig,
channel: &Channel,
path_obj: &PathObject,
) -> Result<PathObject, ServiceError> {
let mut channel_extensions = channel
.extra_extensions
.split(',')
.map(|e| e.to_string())
.collect::<Vec<String>>();
let mut parent_folders = vec![];
let mut extensions = config.storage.extensions.clone();
extensions.append(&mut channel_extensions);
let (path, parent, path_component) = norm_abs_path(&config.channel.storage, &path_obj.source)?;
let parent_path = if !path_component.is_empty() {
path.parent().unwrap()
} else {
&config.channel.storage
};
let mut obj = PathObject::new(path_component, Some(parent));
obj.folders_only = path_obj.folders_only;
if path != parent_path && !path_obj.folders_only {
let mut parents = fs::read_dir(&parent_path).await?;
while let Some(child) = parents.next_entry().await? {
if child.metadata().await?.is_dir() {
parent_folders.push(
child
.path()
.file_name()
.unwrap()
.to_string_lossy()
.to_string(),
);
}
}
parent_folders.path_sort(natural_lexical_cmp);
obj.parent_folders = Some(parent_folders);
}
let mut paths_obj = fs::read_dir(path).await?;
let mut files = vec![];
let mut folders = vec![];
while let Some(child) = paths_obj.next_entry().await? {
let f_meta = child.metadata().await?;
// ignore hidden files/folders on unix
if child.path().to_string_lossy().to_string().contains("/.") {
continue;
}
if f_meta.is_dir() {
folders.push(
child
.path()
.file_name()
.unwrap()
.to_string_lossy()
.to_string(),
);
} else if f_meta.is_file() && !path_obj.folders_only {
if let Some(ext) = file_extension(&child.path()) {
if extensions.contains(&ext.to_string().to_lowercase()) {
files.push(child.path())
}
}
}
}
folders.path_sort(natural_lexical_cmp);
files.path_sort(natural_lexical_cmp);
let mut media_files = vec![];
for file in files {
match MediaProbe::new(file.to_string_lossy().as_ref()) {
Ok(probe) => {
let mut duration = 0.0;
if let Some(dur) = probe.format.duration {
duration = dur.parse().unwrap_or_default()
}
let video = VideoFile {
name: file.file_name().unwrap().to_string_lossy().to_string(),
duration,
};
media_files.push(video);
}
Err(e) => error!("{e:?}"),
};
}
obj.folders = Some(folders);
obj.files = Some(media_files);
Ok(obj)
}
pub async fn create_directory(
config: &PlayoutConfig,
path_obj: &PathObject,
) -> Result<HttpResponse, ServiceError> {
let (path, _, _) = norm_abs_path(&config.channel.storage, &path_obj.source)?;
if let Err(e) = fs::create_dir_all(&path).await {
return Err(ServiceError::BadRequest(e.to_string()));
}
info!(
"create folder: <b><magenta>{}</></b>",
path.to_string_lossy()
);
Ok(HttpResponse::Ok().into())
}
async fn copy_and_delete(source: &PathBuf, target: &PathBuf) -> Result<MoveObject, ServiceError> {
match fs::copy(&source, &target).await {
Ok(_) => {
if let Err(e) = fs::remove_file(source).await {
error!("{e}");
return Err(ServiceError::BadRequest(
"Removing File not possible!".into(),
));
};
return Ok(MoveObject {
source: source
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string(),
target: target
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string(),
});
}
Err(e) => {
error!("{e}");
Err(ServiceError::BadRequest("Error in file copy!".into()))
}
}
}
async fn rename(source: &PathBuf, target: &PathBuf) -> Result<MoveObject, ServiceError> {
match fs::rename(source, target).await {
Ok(_) => Ok(MoveObject {
source: source
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string(),
target: target
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string(),
}),
Err(e) => {
error!("{e}");
copy_and_delete(source, target).await
}
}
}
pub async fn rename_file(
config: &PlayoutConfig,
move_object: &MoveObject,
) -> Result<MoveObject, ServiceError> {
let (source_path, _, _) = norm_abs_path(&config.channel.storage, &move_object.source)?;
let (mut target_path, _, _) = norm_abs_path(&config.channel.storage, &move_object.target)?;
if !source_path.exists() {
return Err(ServiceError::BadRequest("Source file not exist!".into()));
}
if (source_path.is_dir() || source_path.is_file()) && source_path.parent() == Some(&target_path)
{
return rename(&source_path, &target_path).await;
}
if target_path.is_dir() {
target_path = target_path.join(source_path.file_name().unwrap());
}
if target_path.is_file() {
return Err(ServiceError::BadRequest(
"Target file already exists!".into(),
));
}
if source_path.is_file() && target_path.parent().is_some() {
return rename(&source_path, &target_path).await;
}
Err(ServiceError::InternalServerError)
}
pub async fn remove_file_or_folder(
config: &PlayoutConfig,
source_path: &str,
recursive: bool,
) -> Result<(), ServiceError> {
let (source, _, _) = norm_abs_path(&config.channel.storage, source_path)?;
if !source.exists() {
return Err(ServiceError::BadRequest("Source does not exists!".into()));
}
if source.is_dir() {
if recursive {
match fs::remove_dir_all(source).await {
Ok(_) => return Ok(()),
Err(e) => {
error!("{e}");
return Err(ServiceError::BadRequest(
"Delete folder and its content failed!".into(),
));
}
};
} else {
match fs::remove_dir(source).await {
Ok(_) => return Ok(()),
Err(e) => {
error!("{e}");
return Err(ServiceError::BadRequest(
"Delete folder failed! (Folder must be empty)".into(),
));
}
};
}
}
if source.is_file() {
match fs::remove_file(source).await {
Ok(_) => return Ok(()),
Err(e) => {
error!("{e}");
return Err(ServiceError::BadRequest("Delete file failed!".into()));
}
};
}
Err(ServiceError::InternalServerError)
}
async fn valid_path(config: &PlayoutConfig, path: &str) -> Result<PathBuf, ServiceError> {
let (test_path, _, _) = norm_abs_path(&config.channel.storage, path)?;
if !test_path.is_dir() {
return Err(ServiceError::BadRequest("Target folder not exists!".into()));
}
Ok(test_path)
}
pub async fn upload(
config: &PlayoutConfig,
_size: u64,
mut payload: Multipart,
path: &Path,
abs_path: bool,
) -> Result<HttpResponse, ServiceError> {
while let Some(mut field) = payload.try_next().await? {
let content_disposition = field.content_disposition().ok_or("No content")?;
debug!("{content_disposition}");
let rand_string: String = rand::thread_rng()
.sample_iter(&Alphanumeric)
.take(20)
.map(char::from)
.collect();
let filename = content_disposition
.get_filename()
.map_or_else(|| rand_string.to_string(), sanitize_filename::sanitize);
let filepath = if abs_path {
path.to_path_buf()
} else {
valid_path(config, &path.to_string_lossy())
.await?
.join(filename)
};
let filepath_clone = filepath.clone();
let _file_size = match filepath.metadata() {
Ok(metadata) => metadata.len(),
Err(_) => 0,
};
// INFO: File exist check should be enough because file size and content length are different.
// The error catching in the loop should normally prevent unfinished files from existing on disk.
// If this is not enough, a second check can be implemented: is_close(file_size as i64, size as i64, 1000)
if filepath.is_file() {
return Err(ServiceError::Conflict("Target already exists!".into()));
}
let mut f = web::block(|| std::fs::File::create(filepath_clone)).await??;
loop {
match field.try_next().await {
Ok(Some(chunk)) => {
f = web::block(move || f.write_all(&chunk).map(|_| f)).await??;
}
Ok(None) => break,
Err(e) => {
if e.to_string().contains("stream is incomplete") {
info!("Delete non finished file: {filepath:?}");
tokio::fs::remove_file(filepath).await?
}
return Err(e.into());
}
}
}
}
Ok(HttpResponse::Ok().into())
}

View File

@ -1,313 +0,0 @@
/// Simple Playlist Generator
///
/// You can call ffplayout[.exe] -g YYYY-mm-dd - YYYY-mm-dd to generate JSON playlists.
///
/// The generator takes the files from the storage path that is set in the config.
/// It also respects the shuffle/sort mode.
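///
/// Hypothetical example, generating playlists for one week:
/// ffplayout -g 2023-10-02 - 2023-10-08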
use std::{
fs::{create_dir_all, write},
io::Error,
};
use chrono::Timelike;
use lexical_sort::{natural_lexical_cmp, StringSort};
use log::*;
use rand::{seq::SliceRandom, thread_rng, Rng};
use walkdir::WalkDir;
use crate::player::{
controller::ChannelManager,
utils::{
folder::{fill_filler_list, FolderSource},
get_date_range, include_file_extension,
json_serializer::JsonPlaylist,
sum_durations, Media,
},
};
use crate::utils::{
config::{PlayoutConfig, Template},
logging::Target,
time_to_sec,
};
pub fn random_list(clip_list: Vec<Media>, total_length: f64) -> Vec<Media> {
let mut max_attempts = 10000;
let mut randomized_clip_list: Vec<Media> = vec![];
let mut target_duration = 0.0;
let clip_list_length = clip_list.len();
let usage_limit = (total_length / sum_durations(&clip_list)).floor() + 1.0;
let mut last_clip = Media::new(0, "", false);
while target_duration < total_length && max_attempts > 0 {
let index = rand::thread_rng().gen_range(0..clip_list_length);
let selected_clip = clip_list[index].clone();
let selected_clip_count = randomized_clip_list
.iter()
.filter(|&n| *n == selected_clip)
.count() as f64;
if selected_clip_count == usage_limit
|| last_clip == selected_clip
|| target_duration + selected_clip.duration > total_length
{
max_attempts -= 1;
continue;
}
target_duration += selected_clip.duration;
randomized_clip_list.push(selected_clip.clone());
max_attempts -= 1;
last_clip = selected_clip;
}
randomized_clip_list
}
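// Hypothetical example of the usage limit above: for a 600 s block and a clip list
// summing to 200 s, usage_limit = floor(600 / 200) + 1 = 4, so each clip can be
// picked at most four times before further picks of it are skipped.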
pub fn ordered_list(clip_list: Vec<Media>, total_length: f64) -> Vec<Media> {
let mut index = 0;
let mut skip_count = 0;
let mut ordered_clip_list: Vec<Media> = vec![];
let mut target_duration = 0.0;
let clip_list_length = clip_list.len();
while target_duration < total_length && skip_count < clip_list_length {
if index == clip_list_length {
index = 0;
}
let selected_clip = clip_list[index].clone();
if sum_durations(&ordered_clip_list) + selected_clip.duration > total_length
|| (!ordered_clip_list.is_empty()
&& selected_clip == ordered_clip_list[ordered_clip_list.len() - 1])
{
skip_count += 1;
index += 1;
continue;
}
target_duration += selected_clip.duration;
ordered_clip_list.push(selected_clip);
index += 1;
}
ordered_clip_list
}
pub fn filler_list(config: &PlayoutConfig, total_length: f64) -> Vec<Media> {
let filler_list = fill_filler_list(config, None);
let mut index = 0;
let mut filler_clip_list: Vec<Media> = vec![];
let mut target_duration = 0.0;
let clip_list_length = filler_list.len();
if clip_list_length > 0 {
while target_duration < total_length {
if index == clip_list_length {
index = 0;
}
let selected_clip = filler_list[index].clone();
target_duration += selected_clip.duration;
filler_clip_list.push(selected_clip);
index += 1;
}
let over_length = target_duration - total_length;
let last_index = filler_clip_list.len() - 1;
filler_clip_list[last_index].out = filler_clip_list[last_index].duration - over_length;
}
filler_clip_list
}
pub fn generate_from_template(
config: &PlayoutConfig,
manager: &ChannelManager,
template: Template,
) -> FolderSource {
let mut media_list = vec![];
let mut rng = thread_rng();
let mut index: usize = 0;
let id = config.general.channel_id;
for source in template.sources {
let mut source_list = vec![];
let duration = (source.duration.hour() as f64 * 3600.0)
+ (source.duration.minute() as f64 * 60.0)
+ source.duration.second() as f64;
debug!(target: Target::all(), channel = id; "Generating playlist block with <yellow>{duration:.2}</> seconds length");
for path in source.paths {
debug!("Search files in <b><magenta>{path:?}</></b>");
let mut file_list = WalkDir::new(path.clone())
.into_iter()
.flat_map(|e| e.ok())
.filter(|f| f.path().is_file())
.filter(|f| include_file_extension(config, f.path()))
.map(|p| p.path().to_string_lossy().to_string())
.collect::<Vec<String>>();
if !source.shuffle {
file_list.string_sort_unstable(natural_lexical_cmp);
}
for entry in file_list {
let media = Media::new(0, &entry, true);
source_list.push(media);
}
}
let mut timed_list = if source.shuffle {
source_list.shuffle(&mut rng);
random_list(source_list, duration)
} else {
ordered_list(source_list, duration)
};
let total_length = sum_durations(&timed_list);
if duration > total_length {
let mut filler = filler_list(config, duration - total_length);
timed_list.append(&mut filler);
}
media_list.append(&mut timed_list);
}
for item in media_list.iter_mut() {
item.index = Some(index);
index += 1;
}
FolderSource::from_list(manager, media_list)
}
/// Generate playlists
pub fn playlist_generator(manager: &ChannelManager) -> Result<Vec<JsonPlaylist>, Error> {
let config = manager.config.lock().unwrap().clone();
let id = config.general.channel_id;
let channel_name = manager.channel.lock().unwrap().name.clone();
let total_length = match config.playlist.length_sec {
Some(length) => length,
None => {
if config.playlist.length.contains(':') {
time_to_sec(&config.playlist.length)
} else {
86400.0
}
}
};
let playlist_root = &config.channel.playlists;
let mut playlists = vec![];
let mut date_range = vec![];
let mut from_template = false;
if !playlist_root.is_dir() {
error!(
target: Target::all(), channel = id;
"Playlist folder <b><magenta>{:?}</></b> not exists!",
config.channel.playlists
);
}
if let Some(range) = config.general.generate.clone() {
date_range = range;
}
if date_range.contains(&"-".to_string()) && date_range.len() == 3 {
date_range = get_date_range(id, &date_range)
}
// gives an iterator with infinite length
let folder_iter = if let Some(template) = &config.general.template {
from_template = true;
generate_from_template(&config, manager, template.clone())
} else {
FolderSource::new(&config, manager.clone())
};
let list_length = manager.current_list.lock().unwrap().len();
for date in date_range {
let d: Vec<&str> = date.split('-').collect();
let year = d[0];
let month = d[1];
let playlist_path = playlist_root.join(year).join(month);
let playlist_file = &playlist_path.join(format!("{date}.json"));
let mut length = 0.0;
let mut round = 0;
create_dir_all(playlist_path)?;
if playlist_file.is_file() {
warn!(
target: Target::all(), channel = id;
"Playlist exists, skip: <b><magenta>{}</></b>",
playlist_file.display()
);
continue;
}
info!(
target: Target::all(), channel = id;
"Generate playlist: <b><magenta>{}</></b>",
playlist_file.display()
);
let mut playlist = JsonPlaylist {
channel: channel_name.clone(),
date,
path: None,
start_sec: None,
length: None,
modified: None,
program: vec![],
};
if from_template {
let media_list = manager.current_list.lock().unwrap();
playlist.program = media_list.to_vec();
} else {
for item in folder_iter.clone() {
let duration = item.duration;
if total_length >= length + duration {
playlist.program.push(item);
length += duration;
} else if round == list_length - 1 {
break;
} else {
round += 1;
}
}
let list_duration = sum_durations(&playlist.program);
if total_length > list_duration {
let time_left = total_length - list_duration;
let mut fillers = filler_list(&config, time_left);
playlist.program.append(&mut fillers);
}
}
let json: String = serde_json::to_string_pretty(&playlist)?;
write(playlist_file, json)?;
playlists.push(playlist);
}
Ok(playlists)
}

View File

@ -1,490 +0,0 @@
use std::{
collections::{hash_map, HashMap},
env,
io::{self, ErrorKind, Write},
path::PathBuf,
sync::{Arc, Mutex},
time::Duration,
};
use actix_web::rt::time::interval;
use flexi_logger::{
writers::{FileLogWriter, LogWriter},
Age, Cleanup, Criterion, DeferredNow, FileSpec, Level, LogSpecification, Logger, Naming,
};
use lettre::{
message::header, transport::smtp::authentication::Credentials, AsyncSmtpTransport,
AsyncTransport, Message, Tokio1Executor,
};
use log::{kv::Value, *};
use paris::formatter::colorize_string;
use regex::Regex;
use super::ARGS;
use crate::db::models::GlobalSettings;
use crate::utils::{config::Mail, errors::ProcessError, round_to_nearest_ten};
#[derive(Debug)]
pub struct Target;
impl Target {
pub fn all() -> &'static str {
if ARGS.log_to_console {
"{_Default}"
} else {
"{file,mail,_Default}"
}
}
pub fn console() -> &'static str {
"{console}"
}
pub fn file() -> &'static str {
"{file}"
}
pub fn mail() -> &'static str {
"{mail}"
}
pub fn file_mail() -> &'static str {
"{file,mail}"
}
}
pub struct LogConsole;
impl LogWriter for LogConsole {
fn write(&self, now: &mut DeferredNow, record: &Record<'_>) -> std::io::Result<()> {
console_formatter(&mut std::io::stderr(), now, record)?;
println!();
Ok(())
}
fn flush(&self) -> std::io::Result<()> {
Ok(())
}
}
struct MultiFileLogger {
log_path: PathBuf,
writers: Arc<Mutex<HashMap<i32, Arc<Mutex<FileLogWriter>>>>>,
}
impl MultiFileLogger {
pub fn new(log_path: PathBuf) -> Self {
MultiFileLogger {
log_path,
writers: Arc::new(Mutex::new(HashMap::new())),
}
}
fn get_writer(&self, channel: i32) -> io::Result<Arc<Mutex<FileLogWriter>>> {
let mut writers = self.writers.lock().unwrap();
if let hash_map::Entry::Vacant(e) = writers.entry(channel) {
let writer = FileLogWriter::builder(
FileSpec::default()
.suppress_timestamp()
.directory(&self.log_path)
.basename("ffplayout")
.discriminant(channel.to_string()),
)
.format(file_formatter)
.append()
.rotate(
Criterion::Age(Age::Day),
Naming::TimestampsCustomFormat {
current_infix: Some(""),
format: "%Y-%m-%d",
},
Cleanup::KeepLogFiles(ARGS.log_backup_count.unwrap_or(14)),
)
.try_build()
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
e.insert(Arc::new(Mutex::new(writer)));
}
Ok(writers.get(&channel).unwrap().clone())
}
}
impl LogWriter for MultiFileLogger {
fn write(&self, now: &mut DeferredNow, record: &Record) -> io::Result<()> {
let channel = i32::try_from(
record
.key_values()
.get("channel".into())
.unwrap_or(Value::null())
.to_i64()
.unwrap_or(0),
)
.unwrap_or(0);
let writer = self.get_writer(channel);
writer?.lock().unwrap().write(now, record)
}
fn flush(&self) -> io::Result<()> {
let writers = self.writers.lock().unwrap();
for writer in writers.values() {
writer.lock().unwrap().flush()?;
}
Ok(())
}
}
pub struct LogMailer {
pub mail_queues: Arc<Mutex<Vec<Arc<Mutex<MailQueue>>>>>,
raw_lines: Arc<Mutex<Vec<String>>>,
}
impl LogMailer {
pub fn new(mail_queues: Arc<Mutex<Vec<Arc<Mutex<MailQueue>>>>>) -> Self {
Self {
mail_queues,
raw_lines: Arc::new(Mutex::new(vec![])),
}
}
}
impl LogWriter for LogMailer {
fn write(&self, now: &mut DeferredNow, record: &Record<'_>) -> std::io::Result<()> {
let id = i32::try_from(
record
.key_values()
.get("channel".into())
.unwrap_or(Value::null())
.to_i64()
.unwrap_or(0),
)
.unwrap_or(0);
let mut queues = self.mail_queues.lock().unwrap_or_else(|poisoned| {
error!("Queues mutex was poisoned");
poisoned.into_inner()
});
for queue in queues.iter_mut() {
let mut q_lock = queue.lock().unwrap_or_else(|poisoned| {
error!("Queue mutex was poisoned");
poisoned.into_inner()
});
let msg = strip_tags(&record.args().to_string());
let mut raw_lines = self.raw_lines.lock().unwrap();
if q_lock.id == id && q_lock.level_eq(record.level()) && !raw_lines.contains(&msg) {
q_lock.push(format!(
"[{}] [{:>5}] {}",
now.now().format("%Y-%m-%d %H:%M:%S"),
record.level(),
msg.clone()
));
raw_lines.push(msg);
break;
}
if raw_lines.len() > 1000 {
let last = raw_lines.pop().unwrap();
raw_lines.clear();
raw_lines.push(last);
}
}
Ok(())
}
fn flush(&self) -> std::io::Result<()> {
Ok(())
}
}
#[derive(Clone, Debug)]
pub struct MailQueue {
pub id: i32,
pub config: Mail,
pub lines: Vec<String>,
}
impl MailQueue {
pub fn new(id: i32, config: Mail) -> Self {
Self {
id,
config,
lines: vec![],
}
}
pub fn level_eq(&self, level: Level) -> bool {
level <= self.config.mail_level
}
pub fn update(&mut self, config: Mail) {
self.config = config;
}
pub fn clear(&mut self) {
self.lines.clear();
}
pub fn push(&mut self, line: String) {
self.lines.push(line);
}
fn text(&self) -> String {
self.lines.join("\n")
}
fn is_empty(&self) -> bool {
self.lines.is_empty()
}
}
fn strip_tags(input: &str) -> String {
let re = Regex::new(r"<[^>]*>").unwrap();
re.replace_all(input, "").to_string()
}
fn console_formatter(w: &mut dyn Write, now: &mut DeferredNow, record: &Record) -> io::Result<()> {
let log_line = match record.level() {
Level::Debug => colorize_string(format!("<bright-blue>[DEBUG]</> {}", record.args())),
Level::Error => colorize_string(format!("<bright-red>[ERROR]</> {}", record.args())),
Level::Info => colorize_string(format!("<bright-green>[ INFO]</> {}", record.args())),
Level::Trace => colorize_string(format!(
"<bright-yellow>[TRACE]</> {}:{} {}",
record.file().unwrap_or_default(),
record.line().unwrap_or_default(),
record.args()
)),
Level::Warn => colorize_string(format!("<yellow>[ WARN]</> {}", record.args())),
};
if ARGS.log_timestamp {
write!(
w,
"{} {}",
colorize_string(format!(
"<bright black>[{}]</>",
now.now().format("%Y-%m-%d %H:%M:%S%.6f")
)),
log_line
)
} else {
write!(w, "{}", log_line)
}
}
fn file_formatter(
w: &mut dyn Write,
now: &mut DeferredNow,
record: &Record,
) -> std::io::Result<()> {
write!(
w,
"[{}] [{:>5}] {}",
now.now().format("%Y-%m-%d %H:%M:%S%.6f"),
record.level(),
record.args()
)
}
pub fn log_file_path() -> PathBuf {
let config = GlobalSettings::global();
let mut log_path = PathBuf::from(&ARGS.logs.as_ref().unwrap_or(&config.logs));
if !log_path.is_absolute() {
log_path = env::current_dir().unwrap().join(log_path);
}
if !log_path.is_dir() {
log_path = env::current_dir().unwrap();
}
log_path
}
fn file_logger() -> Box<dyn LogWriter> {
if ARGS.log_to_console {
Box::new(LogConsole)
} else {
Box::new(MultiFileLogger::new(log_file_path()))
}
}
/// send log messages to mail recipient
pub async fn send_mail(config: &Mail, msg: String) -> Result<(), ProcessError> {
let recipient = config
.recipient
.split_terminator([',', ';', ' '])
.filter(|s| s.contains('@'))
.map(|s| s.trim())
.collect::<Vec<&str>>();
let mut message = Message::builder()
.from(config.sender_addr.parse()?)
.subject(&config.subject)
.header(header::ContentType::TEXT_PLAIN);
for r in recipient {
message = message.to(r.parse()?);
}
let mail = message.body(msg)?;
let credentials = Credentials::new(config.sender_addr.clone(), config.sender_pass.clone());
let mut transporter =
AsyncSmtpTransport::<Tokio1Executor>::relay(config.smtp_server.clone().as_str());
if config.starttls {
transporter = AsyncSmtpTransport::<Tokio1Executor>::starttls_relay(
config.smtp_server.clone().as_str(),
);
}
let mailer = transporter?.credentials(credentials).build();
// Send the mail
mailer.send(mail).await?;
Ok(())
}
/// Basic Mail Queue
///
/// Check every given interval for messages and send them.
pub fn mail_queue(mail_queues: Arc<Mutex<Vec<Arc<Mutex<MailQueue>>>>>) {
actix_web::rt::spawn(async move {
let sec = 10;
let mut interval = interval(Duration::from_secs(sec));
let mut counter = 0;
loop {
interval.tick().await;
let mut tasks = vec![];
// Reset the counter after one day
if counter >= 86400 {
counter = 0;
} else {
counter += sec;
}
{
let mut queues = match mail_queues.lock() {
Ok(l) => l,
Err(e) => {
error!("Failed to lock mail_queues {e}");
continue;
}
};
// Process mail queues and send emails
for queue in queues.iter_mut() {
let interval = round_to_nearest_ten(counter as i64);
let mut q_lock = queue.lock().unwrap_or_else(|poisoned| {
error!("Queue mutex was poisoned");
poisoned.into_inner()
});
let expire = round_to_nearest_ten(q_lock.config.interval.max(30));
if interval % expire == 0 && !q_lock.is_empty() {
if q_lock.config.recipient.contains('@') {
tasks.push((q_lock.config.clone(), q_lock.text().clone(), q_lock.id));
}
// Clear the messages after sending the email
q_lock.clear();
}
}
}
for (config, text, id) in tasks {
if let Err(e) = send_mail(&config, text).await {
error!(target: "{file}", channel = id; "Failed to send mail: {e}");
}
}
}
});
}
/// Initialize our logging, to have:
///
/// - console logger
/// - file logger
/// - mail logger
pub fn init_logging(mail_queues: Arc<Mutex<Vec<Arc<Mutex<MailQueue>>>>>) -> io::Result<()> {
let log_level = match ARGS
.log_level
.clone()
.unwrap_or("debug".to_string())
.to_lowercase()
.as_str()
{
"debug" => LevelFilter::Debug,
"error" => LevelFilter::Error,
"info" => LevelFilter::Info,
"trace" => LevelFilter::Trace,
"warn" => LevelFilter::Warn,
"off" => LevelFilter::Off,
_ => LevelFilter::Debug,
};
mail_queue(mail_queues.clone());
// Build the initial log specification
let mut builder = LogSpecification::builder();
builder
.default(log_level)
.module("actix", LevelFilter::Info)
.module("actix_files", LevelFilter::Info)
.module("actix_web", LevelFilter::Info)
.module("actix_web_service", LevelFilter::Error)
.module("hyper", LevelFilter::Error)
.module("flexi_logger", LevelFilter::Error)
.module("libc", LevelFilter::Error)
.module("log", LevelFilter::Error)
.module("mio", LevelFilter::Error)
.module("neli", LevelFilter::Error)
.module("reqwest", LevelFilter::Error)
.module("rpc", LevelFilter::Error)
.module("rustls", LevelFilter::Error)
.module("serial_test", LevelFilter::Error)
.module("sqlx", LevelFilter::Error)
.module("tokio", LevelFilter::Error);
Logger::with(builder.build())
.format(console_formatter)
.log_to_stderr()
.add_writer("file", file_logger())
.add_writer("mail", Box::new(LogMailer::new(mail_queues)))
.start()
.map_err(|e| io::Error::new(ErrorKind::Other, e.to_string()))?;
Ok(())
}
/// Format ingest and HLS logging output
pub fn log_line(line: &str, level: &str) {
if line.contains("[info]") && level.to_lowercase() == "info" {
info!("<bright black>[Server]</> {}", line.replace("[info] ", ""))
} else if line.contains("[warning]")
&& (level.to_lowercase() == "warning" || level.to_lowercase() == "info")
{
warn!(
"<bright black>[Server]</> {}",
line.replace("[warning] ", "")
)
} else if line.contains("[error]")
&& !line.contains("Input/output error")
&& !line.contains("Broken pipe")
{
error!("<bright black>[Server]</> {}", line.replace("[error] ", ""));
} else if line.contains("[fatal]") {
error!("<bright black>[Server]</> {}", line.replace("[fatal] ", ""))
}
}

View File

@ -1,391 +0,0 @@
use std::{
env, fmt,
net::TcpListener,
path::{Path, PathBuf},
};
#[cfg(target_family = "unix")]
use std::os::unix::fs::MetadataExt;
use chrono::{format::ParseErrorKind, prelude::*};
use faccess::PathExt;
use log::*;
use path_clean::PathClean;
use rand::Rng;
use regex::Regex;
use tokio::{fs, process::Command};
use serde::{
de::{self, Visitor},
Deserialize, Deserializer, Serialize,
};
pub mod advanced_config;
pub mod args_parse;
pub mod channels;
pub mod config;
pub mod control;
pub mod errors;
pub mod files;
pub mod generator;
pub mod logging;
pub mod playlist;
pub mod system;
pub mod task_runner;
use crate::db::models::GlobalSettings;
use crate::player::utils::time_to_sec;
use crate::utils::{errors::ServiceError, logging::log_file_path};
use crate::ARGS;
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct TextFilter {
pub text: Option<String>,
#[serde(default, deserialize_with = "deserialize_number_or_string")]
pub x: Option<String>,
#[serde(default, deserialize_with = "deserialize_number_or_string")]
pub y: Option<String>,
#[serde(default, deserialize_with = "deserialize_number_or_string")]
pub fontsize: Option<String>,
#[serde(default, deserialize_with = "deserialize_number_or_string")]
pub line_spacing: Option<String>,
pub fontcolor: Option<String>,
#[serde(default, deserialize_with = "deserialize_number_or_string")]
pub alpha: Option<String>,
#[serde(default, deserialize_with = "deserialize_number_or_string")]
pub r#box: Option<String>,
pub boxcolor: Option<String>,
#[serde(default, deserialize_with = "deserialize_number_or_string")]
pub boxborderw: Option<String>,
}
/// Deserialize number or string
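///
/// Accepts plain numbers as well as strings; comma decimals like "0,5" are
/// normalized to "0.5" by the regex in the visitor below.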
pub fn deserialize_number_or_string<'de, D>(deserializer: D) -> Result<Option<String>, D::Error>
where
D: serde::Deserializer<'de>,
{
struct StringOrNumberVisitor;
impl<'de> Visitor<'de> for StringOrNumberVisitor {
type Value = Option<String>;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a string or a number")
}
fn visit_str<E: de::Error>(self, value: &str) -> Result<Self::Value, E> {
let re = Regex::new(r"0,([0-9]+)").unwrap();
let clean_string = re.replace_all(value, "0.$1").to_string();
Ok(Some(clean_string))
}
fn visit_u64<E: de::Error>(self, value: u64) -> Result<Self::Value, E> {
Ok(Some(value.to_string()))
}
fn visit_i64<E: de::Error>(self, value: i64) -> Result<Self::Value, E> {
Ok(Some(value.to_string()))
}
fn visit_f64<E: de::Error>(self, value: f64) -> Result<Self::Value, E> {
Ok(Some(value.to_string()))
}
}
deserializer.deserialize_any(StringOrNumberVisitor)
}
impl fmt::Display for TextFilter {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let escaped_text = self
.text
.clone()
.unwrap_or_default()
.replace('\'', "'\\\\\\''")
.replace('\\', "\\\\\\\\")
.replace('%', "\\\\\\%")
.replace(':', "\\:");
let mut s = format!("text='{escaped_text}'");
if let Some(v) = &self.x {
if !v.is_empty() {
s.push_str(&format!(":x='{v}'"));
}
}
if let Some(v) = &self.y {
if !v.is_empty() {
s.push_str(&format!(":y='{v}'"));
}
}
if let Some(v) = &self.fontsize {
if !v.is_empty() {
s.push_str(&format!(":fontsize={v}"));
}
}
if let Some(v) = &self.line_spacing {
if !v.is_empty() {
s.push_str(&format!(":line_spacing={v}"));
}
}
if let Some(v) = &self.fontcolor {
if !v.is_empty() {
s.push_str(&format!(":fontcolor={v}"));
}
}
if let Some(v) = &self.alpha {
if !v.is_empty() {
s.push_str(&format!(":alpha='{v}'"));
}
}
if let Some(v) = &self.r#box {
if !v.is_empty() {
s.push_str(&format!(":box={v}"));
}
}
if let Some(v) = &self.boxcolor {
if !v.is_empty() {
s.push_str(&format!(":boxcolor={v}"));
}
}
if let Some(v) = &self.boxborderw {
if !v.is_empty() {
s.push_str(&format!(":boxborderw={v}"));
}
}
write!(f, "{s}")
}
}
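// Hypothetical example of the rendered drawtext parameter string:
// let filter = TextFilter { text: Some("News".into()), fontsize: Some("24".into()), ..Default::default() };
// assert_eq!(filter.to_string(), "text='News':fontsize=24");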
pub fn db_path() -> Result<&'static str, Box<dyn std::error::Error>> {
if let Some(path) = ARGS.db.clone() {
let mut absolute_path = if path.is_absolute() {
path
} else {
env::current_dir()?.join(path)
}
.clean();
if absolute_path.is_dir() {
absolute_path = absolute_path.join("ffplayout.db");
}
if let Some(abs_path) = absolute_path.parent() {
if abs_path.writable() {
return Ok(Box::leak(
absolute_path.to_string_lossy().to_string().into_boxed_str(),
));
}
error!("Given database path is not writable!");
}
}
let sys_path = Path::new("/usr/share/ffplayout/db");
let mut db_path = "./ffplayout.db";
if sys_path.is_dir() && !sys_path.writable() {
error!("Path {} is not writable!", sys_path.display());
}
if sys_path.is_dir() && sys_path.writable() {
db_path = "/usr/share/ffplayout/db/ffplayout.db";
} else if Path::new("./assets").is_dir() {
db_path = "./assets/ffplayout.db";
}
Ok(db_path)
}
pub fn public_path() -> PathBuf {
let config = GlobalSettings::global();
let dev_path = env::current_dir()
.unwrap_or_default()
.join("frontend/.output/public/");
let mut public_path = PathBuf::from(&config.public);
if let Some(p) = &ARGS.public {
// When public path is set as argument use this path for serving static files.
// Works only when feature embed_frontend is not set.
let public = PathBuf::from(p);
public_path = if public.is_absolute() {
public.to_path_buf()
} else {
env::current_dir().unwrap_or_default().join(public)
}
.clean();
} else if cfg!(debug_assertions) && dev_path.is_dir() {
public_path = dev_path;
}
public_path
}
pub async fn read_log_file(channel_id: &i32, date: &str) -> Result<String, ServiceError> {
let date_str = if date.is_empty() {
"".to_string()
} else {
format!("_{date}")
};
let log_path = log_file_path()
.join(format!("ffplayout_{channel_id}{date_str}.log"))
.clean();
let file_size = fs::metadata(&log_path).await?.len() as f64;
let log_content = if file_size > 5000000.0 {
error!("Log file to big: {}", sizeof_fmt(file_size));
format!("The log file is larger ({}) than the hard limit of 5MB, the probability is very high that something is wrong with the playout.\nCheck this on the server with `less {log_path:?}`.", sizeof_fmt(file_size))
} else {
fs::read_to_string(log_path).await?
};
Ok(log_content)
}
/// get a human-readable file size
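///
/// Hypothetical example: sizeof_fmt(5_242_880.0) returns "5.0MiB".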
pub fn sizeof_fmt(mut num: f64) -> String {
let suffix = 'B';
for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"] {
if num.abs() < 1024.0 {
return format!("{num:.1}{unit}{suffix}");
}
num /= 1024.0;
}
format!("{num:.1}Yi{suffix}")
}
pub fn local_utc_offset() -> i32 {
let mut offset = Local::now().format("%:z").to_string();
let operator = offset.remove(0);
let mut utc_offset = 0;
if let Some((r, f)) = offset.split_once(':') {
utc_offset = r.parse::<i32>().unwrap_or(0) * 60 + f.parse::<i32>().unwrap_or(0);
if operator == '-' && utc_offset > 0 {
utc_offset = -utc_offset;
}
}
utc_offset
}
pub fn naive_date_time_from_str<'de, D>(deserializer: D) -> Result<NaiveDateTime, D::Error>
where
D: Deserializer<'de>,
{
let s: String = Deserialize::deserialize(deserializer)?;
match NaiveDateTime::parse_from_str(&s, "%Y-%m-%dT%H:%M:%S") {
Ok(date_time) => Ok(date_time),
Err(e) => {
if e.kind() == ParseErrorKind::TooShort {
NaiveDateTime::parse_from_str(&format!("{s}T00:00:00"), "%Y-%m-%dT%H:%M:%S")
.map_err(de::Error::custom)
} else {
NaiveDateTime::parse_from_str(&s, "%Y-%m-%dT%H:%M:%S%#z").map_err(de::Error::custom)
}
}
}
}
/// get a free tcp socket
pub fn gen_tcp_socket(exclude_socket: String) -> Option<String> {
for _ in 0..100 {
let port = rand::thread_rng().gen_range(45321..54268);
let socket = format!("127.0.0.1:{port}");
if socket != exclude_socket && TcpListener::bind(("127.0.0.1", port)).is_ok() {
return Some(socket);
}
}
None
}
pub fn round_to_nearest_ten(num: i64) -> i64 {
if num % 10 >= 5 {
((num / 10) + 1) * 10
} else {
(num / 10) * 10
}
}
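// Examples: round_to_nearest_ten(14) == 10, round_to_nearest_ten(15) == 20.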
pub async fn copy_assets(storage_path: &Path) -> Result<(), std::io::Error> {
if storage_path.is_dir() {
let target = storage_path.join("00-assets");
let mut dummy_source = Path::new("/usr/share/ffplayout/dummy.vtt");
let mut font_source = Path::new("/usr/share/ffplayout/DejaVuSans.ttf");
let mut logo_source = Path::new("/usr/share/ffplayout/logo.png");
if !dummy_source.is_file() {
dummy_source = Path::new("./assets/dummy.vtt")
}
if !font_source.is_file() {
font_source = Path::new("./assets/DejaVuSans.ttf")
}
if !logo_source.is_file() {
logo_source = Path::new("./assets/logo.png")
}
if !target.is_dir() {
let dummy_target = target.join("dummy.vtt");
let font_target = target.join("DejaVuSans.ttf");
let logo_target = target.join("logo.png");
fs::create_dir_all(&target).await?;
fs::copy(&dummy_source, &dummy_target).await?;
fs::copy(&font_source, &font_target).await?;
fs::copy(&logo_source, &logo_target).await?;
#[cfg(target_family = "unix")]
{
let uid = nix::unistd::Uid::current();
let parent_owner = storage_path.metadata().unwrap().uid();
if uid.is_root() && uid.to_string() != parent_owner.to_string() {
let user = nix::unistd::User::from_uid(parent_owner.into())
.unwrap_or_default()
.unwrap();
nix::unistd::chown(&target, Some(user.uid), Some(user.gid))?;
if dummy_target.is_file() {
nix::unistd::chown(&dummy_target, Some(user.uid), Some(user.gid))?;
}
if font_target.is_file() {
nix::unistd::chown(&font_target, Some(user.uid), Some(user.gid))?;
}
if logo_target.is_file() {
nix::unistd::chown(&logo_target, Some(user.uid), Some(user.gid))?;
}
}
}
}
} else {
error!("Storage path {storage_path:?} not exists!");
}
Ok(())
}
/// Combined function to check if the program is running inside a container.
/// Returns `true` if running inside a container, otherwise `false`.
pub async fn is_running_in_container() -> bool {
// Check for Docker or Podman specific files
if Path::new("/.dockerenv").exists() || Path::new("/run/.containerenv").exists() {
return true;
}
// Run `systemd-detect-virt -c` to check if we are in a container
if let Ok(output) = Command::new("systemd-detect-virt").arg("-c").output().await {
return output.status.success();
}
false
}

View File

@ -1,176 +0,0 @@
use std::fmt;
use local_ip_address::list_afinet_netifas;
use serde::Serialize;
use sysinfo::System;
use crate::utils::config::PlayoutConfig;
use crate::{DISKS, NETWORKS, SYS};
const IGNORE_INTERFACES: [&str; 7] = ["docker", "lxdbr", "tab", "tun", "virbr", "veth", "vnet"];
#[derive(Debug, Serialize)]
pub struct Cpu {
pub cores: f32,
pub usage: f32,
}
#[derive(Debug, Default, Serialize)]
pub struct Storage {
pub path: String,
pub total: u64,
pub used: u64,
}
#[derive(Debug, Serialize)]
pub struct Load {
pub one: f64,
pub five: f64,
pub fifteen: f64,
}
#[derive(Debug, Serialize)]
pub struct Memory {
pub total: u64,
pub used: u64,
pub free: u64,
}
#[derive(Debug, Default, Serialize)]
pub struct Network {
pub name: String,
pub current_in: u64,
pub total_in: u64,
pub current_out: u64,
pub total_out: u64,
}
#[derive(Debug, Serialize)]
pub struct MySystem {
pub name: Option<String>,
pub kernel: Option<String>,
pub version: Option<String>,
pub ffp_version: Option<String>,
}
#[derive(Debug, Serialize)]
pub struct Swap {
pub total: u64,
pub used: u64,
pub free: u64,
}
#[derive(Debug, Serialize)]
pub struct SystemStat {
pub cpu: Cpu,
pub load: Load,
pub memory: Memory,
pub network: Network,
pub storage: Storage,
pub swap: Swap,
pub system: MySystem,
}
impl fmt::Display for SystemStat {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", serde_json::to_string(self).unwrap())
}
}
pub fn stat(config: PlayoutConfig) -> SystemStat {
let mut disks = DISKS.lock().unwrap();
let mut networks = NETWORKS.lock().unwrap();
let mut sys = SYS.lock().unwrap();
let network_interfaces = list_afinet_netifas().unwrap_or_default();
let mut usage = 0.0;
let mut interfaces = vec![];
for (name, ip) in network_interfaces.iter() {
if !ip.is_loopback()
&& !IGNORE_INTERFACES
.iter()
.any(|&prefix| name.starts_with(prefix))
{
interfaces.push((name, ip))
}
}
interfaces.dedup_by(|a, b| a.0 == b.0);
disks.refresh();
networks.refresh();
sys.refresh_cpu_usage();
sys.refresh_memory();
let cores = sys.cpus().len() as f32;
for cpu in sys.cpus() {
usage += cpu.cpu_usage();
}
let cpu = Cpu {
cores,
usage: usage * cores / 100.0,
};
let mut storage = Storage::default();
for disk in &*disks {
if disk.mount_point().to_string_lossy().len() > 1
&& config.channel.storage.starts_with(disk.mount_point())
{
storage.path = disk.name().to_string_lossy().to_string();
storage.total = disk.total_space();
storage.used = disk.available_space();
}
}
let load_avg = System::load_average();
let load = Load {
one: load_avg.one,
five: load_avg.five,
fifteen: load_avg.fifteen,
};
let memory = Memory {
total: sys.total_memory(),
used: sys.used_memory(),
free: sys.total_memory() - sys.used_memory(),
};
let mut network = Network::default();
for (interface_name, data) in &*networks {
if !interfaces.is_empty() && interface_name == interfaces[0].0 {
network.name.clone_from(interface_name);
network.current_in = data.received();
network.total_in = data.total_received();
network.current_out = data.transmitted();
network.total_out = data.total_transmitted();
}
}
let swap = Swap {
total: sys.total_swap(),
used: sys.used_swap(),
free: sys.free_swap(),
};
let system = MySystem {
name: System::name(),
kernel: System::kernel_version(),
version: System::os_version(),
ffp_version: Some(env!("CARGO_PKG_VERSION").to_string()),
};
SystemStat {
cpu,
storage,
load,
memory,
network,
system,
swap,
}
}

View File

@ -1,27 +0,0 @@
use std::process::Command;
use log::*;
use crate::player::utils::get_data_map;
use crate::player::controller::ChannelManager;
pub fn run(manager: ChannelManager) {
let task_path = manager.config.lock().unwrap().task.path.clone();
let obj = serde_json::to_string(&get_data_map(&manager)).unwrap();
trace!("Run task: {obj}");
match Command::new(task_path).arg(obj).spawn() {
Ok(mut c) => {
let status = c.wait().expect("Error in waiting for the task process!");
if !status.success() {
error!("Process stops with error.");
}
}
Err(e) => {
error!("Couldn't spawn task runner: {e}")
}
}
}

42
ffplayout-api/Cargo.toml Normal file
View File

@ -0,0 +1,42 @@
[package]
name = "ffplayout-api"
description = "Rest API for ffplayout"
readme = "README.md"
version.workspace = true
license.workspace = true
authors.workspace = true
repository.workspace = true
edition.workspace = true
[dependencies]
ffplayout-lib = { path = "../lib" }
actix-files = "0.6"
actix-multipart = "0.5"
actix-web = "4"
actix-web-grants = "3"
actix-web-httpauth = "0.6"
argon2 = "0.4"
chrono = { version = "0.4", default-features = false, features = ["clock", "std"] }
clap = { version = "4.3", features = ["derive"] }
derive_more = "0.99"
faccess = "0.2"
futures-util = { version = "0.3", default-features = false, features = ["std"] }
jsonwebtoken = "8"
lexical-sort = "0.3"
once_cell = "1.10"
rand = "0.8"
regex = "1"
relative-path = "1.6"
reqwest = { version = "0.11", default-features = false, features = ["blocking", "json", "rustls-tls"] }
rpassword = "6.0"
sanitize-filename = "0.3"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde_yaml = "0.9"
simplelog = { version = "^0.12", features = ["paris"] }
sqlx = { version = "0.7", features = ["runtime-tokio", "sqlite"] }
tokio = { version = "1.25", features = ["full"] }
[[bin]]
name = "ffpapi"
path = "src/main.rs"

63
ffplayout-api/README.md Normal file
View File

@ -0,0 +1,63 @@
**ffplayout-api**
================
ffplayout-api (ffpapi) is a non-strict REST API for ffplayout. It makes it possible to control the engine, read and manipulate the config, save playlists, etc.
To be able to use the API it is necessary to initialize the settings database first. To do that, run:
```BASH
ffpapi -i
```
Then add an admin user:
```BASH
ffpapi -u <USERNAME> -p <PASSWORD> -m <MAIL ADDRESS>
```
Then run the API through the systemd service, or directly:
```BASH
ffpapi -l 127.0.0.1:8787
```
Possible Arguments
-----
```BASH
OPTIONS:
-a, --ask ask for user credentials
-d, --domain <DOMAIN> domain name for initialization
-h, --help Print help information
-i, --init Initialize Database
-l, --listen <LISTEN> Listen on IP:PORT, like: 127.0.0.1:8787
-m, --mail <MAIL> Admin mail address
-p, --password <PASSWORD> Admin password
-u, --username <USERNAME> Create admin user
-V, --version Print version information
```
If you plan to run ffpapi with systemd, set the ownership of **/usr/share/ffplayout** and its content to user **ffpu:ffpu**. The user **ffpu** has to be created first.
**For possible endpoints read: [api endpoints](/docs/api.md)**
ffpapi can also serve the browser-based frontend; just open `127.0.0.1:8787` in your browser.
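As a minimal sketch (assuming the default listen address above and the login endpoint described in the API docs), a session token can be requested like this:
```BASH
curl -X POST http://127.0.0.1:8787/auth/login/ \
    -H "Content-Type: application/json" \
    -d '{"username": "<USERNAME>", "password": "<PASSWORD>"}'
```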
"Piggyback" Mode
-----
ffplayout was originally planned to run under Linux as a systemd service. It is also designed so that the engine and ffpapi run completely independently of each other, which increases flexibility and stability.
Nevertheless, programs compiled in Rust can basically run on all systems supported by the language, so this repo also offers binaries for other platforms.
In the past, however, it was only possible under Linux to start/stop/restart the ffplayout engine process through ffpapi. This limitation no longer exists since v0.17.0, when "piggyback" mode was introduced. ffpapi now recognizes which platform it is running on, and if it is not on Linux, it starts the engine as a child process. It is therefore possible to control the ffplayout engine completely on all platforms. The disadvantage is that the engine process depends on ffpapi; if ffpapi closes or crashes, the engine also closes.
Under Linux, this mode can be simulated by starting ffpapi with the environment variable `PIGGYBACK_MODE=true`. This scenario is also conceivable in container operation, for example.
**Run in piggyback mode:**
```BASH
PIGGYBACK_MODE=true ffpapi -l 127.0.0.1:8787
```
This function is experimental, use it with caution.

View File

@ -1,13 +1,10 @@
use actix_web::error::ErrorUnauthorized;
use actix_web::Error;
use chrono::{TimeDelta, Utc};
use chrono::{Duration, Utc};
use jsonwebtoken::{self, DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};
use crate::{
db::models::{GlobalSettings, Role},
utils::errors::ServiceError,
};
use crate::utils::GlobalSettings;
// Token lifetime
const JWT_EXPIRATION_DAYS: i64 = 7;
@ -15,39 +12,34 @@ const JWT_EXPIRATION_DAYS: i64 = 7;
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
pub struct Claims {
pub id: i32,
pub channels: Vec<i32>,
pub username: String,
pub role: Role,
pub role: String,
exp: i64,
}
impl Claims {
pub fn new(id: i32, channels: Vec<i32>, username: String, role: Role) -> Self {
pub fn new(id: i32, username: String, role: String) -> Self {
Self {
id,
channels,
username,
role,
exp: (Utc::now() + TimeDelta::try_days(JWT_EXPIRATION_DAYS).unwrap()).timestamp(),
exp: (Utc::now() + Duration::days(JWT_EXPIRATION_DAYS)).timestamp(),
}
}
}
/// Create a json web token (JWT)
pub async fn create_jwt(claims: Claims) -> Result<String, ServiceError> {
pub fn create_jwt(claims: Claims) -> Result<String, Error> {
let config = GlobalSettings::global();
let encoding_key = EncodingKey::from_secret(config.secret.clone().unwrap().as_bytes());
Ok(jsonwebtoken::encode(
&Header::default(),
&claims,
&encoding_key,
)?)
let encoding_key = EncodingKey::from_secret(config.secret.as_bytes());
jsonwebtoken::encode(&Header::default(), &claims, &encoding_key)
.map_err(|e| ErrorUnauthorized(e.to_string()))
}
/// Decode a json web token (JWT)
pub async fn decode_jwt(token: &str) -> Result<Claims, Error> {
let config = GlobalSettings::global();
let decoding_key = DecodingKey::from_secret(config.secret.clone().unwrap().as_bytes());
let decoding_key = DecodingKey::from_secret(config.secret.as_bytes());
jsonwebtoken::decode::<Claims>(token, &decoding_key, &Validation::default())
.map(|data| data.claims)
.map_err(|e| ErrorUnauthorized(e.to_string()))

File diff suppressed because it is too large

View File

@ -0,0 +1,332 @@
use std::env;
use argon2::{
password_hash::{rand_core::OsRng, SaltString},
Argon2, PasswordHasher,
};
use rand::{distributions::Alphanumeric, Rng};
use simplelog::*;
use sqlx::{migrate::MigrateDatabase, sqlite::SqliteQueryResult, Pool, Sqlite};
use crate::db::{
db_pool,
models::{Channel, TextPreset, User},
};
use crate::utils::{db_path, local_utc_offset, GlobalSettings};
#[derive(Debug, sqlx::FromRow)]
struct Role {
name: String,
}
async fn create_schema(conn: &Pool<Sqlite>) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "PRAGMA foreign_keys = ON;
CREATE TABLE IF NOT EXISTS global
(
id INTEGER PRIMARY KEY AUTOINCREMENT,
secret TEXT NOT NULL,
UNIQUE(secret)
);
CREATE TABLE IF NOT EXISTS roles
(
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
UNIQUE(name)
);
CREATE TABLE IF NOT EXISTS channels
(
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
preview_url TEXT NOT NULL,
config_path TEXT NOT NULL,
extra_extensions TEXT NOT NULL,
service TEXT NOT NULL,
UNIQUE(name, service)
);
CREATE TABLE IF NOT EXISTS presets
(
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
text TEXT NOT NULL,
x TEXT NOT NULL,
y TEXT NOT NULL,
fontsize TEXT NOT NULL,
line_spacing TEXT NOT NULL,
fontcolor TEXT NOT NULL,
box TEXT NOT NULL,
boxcolor TEXT NOT NULL,
boxborderw TEXT NOT NULL,
alpha TEXT NOT NULL,
channel_id INTEGER NOT NULL DEFAULT 1,
FOREIGN KEY (channel_id) REFERENCES channels (id) ON UPDATE SET NULL ON DELETE SET NULL,
UNIQUE(name)
);
CREATE TABLE IF NOT EXISTS user
(
id INTEGER PRIMARY KEY AUTOINCREMENT,
mail TEXT NOT NULL,
username TEXT NOT NULL,
password TEXT NOT NULL,
salt TEXT NOT NULL,
role_id INTEGER NOT NULL DEFAULT 2,
channel_id INTEGER NOT NULL DEFAULT 1,
FOREIGN KEY (role_id) REFERENCES roles (id) ON UPDATE SET NULL ON DELETE SET NULL,
FOREIGN KEY (channel_id) REFERENCES channels (id) ON UPDATE SET NULL ON DELETE SET NULL,
UNIQUE(mail, username)
);";
sqlx::query(query).execute(conn).await
}
pub async fn db_init(domain: Option<String>) -> Result<&'static str, Box<dyn std::error::Error>> {
let db_path = db_path()?;
if !Sqlite::database_exists(&db_path).await.unwrap_or(false) {
Sqlite::create_database(&db_path).await.unwrap();
let pool = db_pool().await?;
match create_schema(&pool).await {
Ok(_) => info!("Database created successfully"),
Err(e) => panic!("{e}"),
}
}
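// Generate a random 80-character secret; it is stored in the global table and used for signing JWTs.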
let secret: String = rand::thread_rng()
.sample_iter(&Alphanumeric)
.take(80)
.map(char::from)
.collect();
let url = match domain {
Some(d) => format!("http://{d}/live/stream.m3u8"),
None => "http://localhost/live/stream.m3u8".to_string(),
};
let config_path = if env::consts::OS == "linux" {
"/etc/ffplayout/ffplayout.yml"
} else {
"./assets/ffplayout.yml"
};
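// The trigger below makes the insert fail when the global table already holds a row, so a second `ffpapi -i` cannot re-initialize the database.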
let query = "CREATE TRIGGER global_row_count
BEFORE INSERT ON global
WHEN (SELECT COUNT(*) FROM global) >= 1
BEGIN
SELECT RAISE(FAIL, 'Database is already initialized!');
END;
INSERT INTO global(secret) VALUES($1);
INSERT INTO channels(name, preview_url, config_path, extra_extensions, service)
VALUES('Channel 1', $2, $3, 'jpg,jpeg,png', 'ffplayout.service');
INSERT INTO roles(name) VALUES('admin'), ('user'), ('guest');
INSERT INTO presets(name, text, x, y, fontsize, line_spacing, fontcolor, box, boxcolor, boxborderw, alpha, channel_id)
VALUES('Default', 'Welcome to ffplayout messenger!', '(w-text_w)/2', '(h-text_h)/2', '24', '4', '#ffffff@0xff', '0', '#000000@0x80', '4', '1.0', '1'),
('Empty Text', '', '0', '0', '24', '4', '#000000', '0', '#000000', '0', '0', '1'),
('Bottom Text fade in', 'The upcoming event will be delayed by a few minutes.', '(w-text_w)/2', '(h-line_h)*0.9', '24', '4', '#ffffff',
'1', '#000000@0x80', '4', 'ifnot(ld(1),st(1,t));if(lt(t,ld(1)+1),0,if(lt(t,ld(1)+2),(t-(ld(1)+1))/1,if(lt(t,ld(1)+8),1,if(lt(t,ld(1)+9),(1-(t-(ld(1)+8)))/1,0))))', '1'),
('Scrolling Text', 'We have a very important announcement to make.', 'ifnot(ld(1),st(1,t));if(lt(t,ld(1)+1),w+4,w-w/12*mod(t-ld(1),12*(w+tw)/w))', '(h-line_h)*0.9',
'24', '4', '#ffffff', '1', '#000000@0x80', '4', '1.0', '1');";
let pool = db_pool().await?;
sqlx::query(query)
.bind(secret)
.bind(url)
.bind(config_path)
.execute(&pool)
.await?;
Ok("Database initialized!")
}
pub async fn select_global(conn: &Pool<Sqlite>) -> Result<GlobalSettings, sqlx::Error> {
let query = "SELECT secret FROM global WHERE id = 1";
sqlx::query_as(query).fetch_one(conn).await
}
pub async fn select_channel(conn: &Pool<Sqlite>, id: &i32) -> Result<Channel, sqlx::Error> {
let query = "SELECT * FROM channels WHERE id = $1";
let mut result: Channel = sqlx::query_as(query).bind(id).fetch_one(conn).await?;
result.utc_offset = local_utc_offset();
Ok(result)
}
pub async fn select_all_channels(conn: &Pool<Sqlite>) -> Result<Vec<Channel>, sqlx::Error> {
let query = "SELECT * FROM channels";
let mut results: Vec<Channel> = sqlx::query_as(query).fetch_all(conn).await?;
for result in results.iter_mut() {
result.utc_offset = local_utc_offset();
}
Ok(results)
}
pub async fn update_channel(
conn: &Pool<Sqlite>,
id: i32,
channel: Channel,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "UPDATE channels SET name = $2, preview_url = $3, config_path = $4, extra_extensions = $5 WHERE id = $1";
sqlx::query(query)
.bind(id)
.bind(channel.name)
.bind(channel.preview_url)
.bind(channel.config_path)
.bind(channel.extra_extensions)
.execute(conn)
.await
}
pub async fn insert_channel(conn: &Pool<Sqlite>, channel: Channel) -> Result<Channel, sqlx::Error> {
let query = "INSERT INTO channels (name, preview_url, config_path, extra_extensions, service) VALUES($1, $2, $3, $4, $5)";
let result = sqlx::query(query)
.bind(channel.name)
.bind(channel.preview_url)
.bind(channel.config_path)
.bind(channel.extra_extensions)
.bind(channel.service)
.execute(conn)
.await?;
sqlx::query_as("SELECT * FROM channels WHERE id = $1")
.bind(result.last_insert_rowid())
.fetch_one(conn)
.await
}
pub async fn delete_channel(
conn: &Pool<Sqlite>,
id: &i32,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "DELETE FROM channels WHERE id = $1";
sqlx::query(query).bind(id).execute(conn).await
}
pub async fn select_last_channel(conn: &Pool<Sqlite>) -> Result<i32, sqlx::Error> {
let query = "SELECT id FROM channels ORDER BY id DESC LIMIT 1;";
sqlx::query_scalar(query).fetch_one(conn).await
}
pub async fn select_role(conn: &Pool<Sqlite>, id: &i32) -> Result<String, sqlx::Error> {
let query = "SELECT name FROM roles WHERE id = $1";
let result: Role = sqlx::query_as(query).bind(id).fetch_one(conn).await?;
Ok(result.name)
}
pub async fn select_login(conn: &Pool<Sqlite>, user: &str) -> Result<User, sqlx::Error> {
let query = "SELECT id, mail, username, password, salt, role_id FROM user WHERE username = $1";
sqlx::query_as(query).bind(user).fetch_one(conn).await
}
pub async fn select_user(conn: &Pool<Sqlite>, user: &str) -> Result<User, sqlx::Error> {
let query = "SELECT id, mail, username, role_id FROM user WHERE username = $1";
sqlx::query_as(query).bind(user).fetch_one(conn).await
}
pub async fn insert_user(
conn: &Pool<Sqlite>,
user: User,
) -> Result<SqliteQueryResult, sqlx::Error> {
let salt = SaltString::generate(&mut OsRng);
let password_hash = Argon2::default()
.hash_password(user.password.clone().as_bytes(), &salt)
.unwrap();
let query =
"INSERT INTO user (mail, username, password, salt, role_id) VALUES($1, $2, $3, $4, $5)";
sqlx::query(query)
.bind(user.mail)
.bind(user.username)
.bind(password_hash.to_string())
.bind(salt.to_string())
.bind(user.role_id)
.execute(conn)
.await
}
pub async fn update_user(
conn: &Pool<Sqlite>,
id: i32,
fields: String,
) -> Result<SqliteQueryResult, sqlx::Error> {
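// NOTE: `fields` is interpolated into the SQL string; callers must pass trusted column assignments only.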
let query = format!("UPDATE user SET {fields} WHERE id = $1");
sqlx::query(&query).bind(id).execute(conn).await
}
pub async fn select_presets(conn: &Pool<Sqlite>, id: i32) -> Result<Vec<TextPreset>, sqlx::Error> {
let query = "SELECT * FROM presets WHERE channel_id = $1";
sqlx::query_as(query).bind(id).fetch_all(conn).await
}
pub async fn update_preset(
conn: &Pool<Sqlite>,
id: &i32,
preset: TextPreset,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query =
"UPDATE presets SET name = $1, text = $2, x = $3, y = $4, fontsize = $5, line_spacing = $6,
fontcolor = $7, alpha = $8, box = $9, boxcolor = $10, boxborderw = $11 WHERE id = $12";
sqlx::query(query)
.bind(preset.name)
.bind(preset.text)
.bind(preset.x)
.bind(preset.y)
.bind(preset.fontsize)
.bind(preset.line_spacing)
.bind(preset.fontcolor)
.bind(preset.alpha)
.bind(preset.r#box)
.bind(preset.boxcolor)
.bind(preset.boxborderw)
.bind(id)
.execute(conn)
.await
}
pub async fn insert_preset(
conn: &Pool<Sqlite>,
preset: TextPreset,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query =
"INSERT INTO presets (channel_id, name, text, x, y, fontsize, line_spacing, fontcolor, alpha, box, boxcolor, boxborderw)
VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)";
sqlx::query(query)
.bind(preset.channel_id)
.bind(preset.name)
.bind(preset.text)
.bind(preset.x)
.bind(preset.y)
.bind(preset.fontsize)
.bind(preset.line_spacing)
.bind(preset.fontcolor)
.bind(preset.alpha)
.bind(preset.r#box)
.bind(preset.boxcolor)
.bind(preset.boxborderw)
.execute(conn)
.await
}
pub async fn delete_preset(
conn: &Pool<Sqlite>,
id: &i32,
) -> Result<SqliteQueryResult, sqlx::Error> {
let query = "DELETE FROM presets WHERE id = $1;";
sqlx::query(query).bind(id).execute(conn).await
}

View File

@ -0,0 +1,13 @@
use sqlx::{Pool, Sqlite, SqlitePool};
pub mod handles;
pub mod models;
use crate::utils::db_path;
pub async fn db_pool() -> Result<Pool<Sqlite>, sqlx::Error> {
let db_path = db_path().unwrap();
let conn = SqlitePool::connect(&db_path).await?;
Ok(conn)
}

View File

@ -0,0 +1,74 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Deserialize, Serialize, sqlx::FromRow)]
pub struct User {
#[sqlx(default)]
#[serde(skip_deserializing)]
pub id: i32,
#[sqlx(default)]
pub mail: Option<String>,
pub username: String,
#[sqlx(default)]
#[serde(skip_serializing, default = "empty_string")]
pub password: String,
#[sqlx(default)]
#[serde(skip_serializing)]
pub salt: Option<String>,
#[sqlx(default)]
#[serde(skip_serializing)]
pub role_id: Option<i32>,
#[sqlx(default)]
#[serde(skip_serializing)]
pub channel_id: Option<i32>,
#[sqlx(default)]
pub token: Option<String>,
}
fn empty_string() -> String {
"".to_string()
}
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct LoginUser {
pub id: i32,
pub username: String,
}
impl LoginUser {
pub fn new(id: i32, username: String) -> Self {
Self { id, username }
}
}
#[derive(Debug, Deserialize, Serialize, Clone, sqlx::FromRow)]
pub struct TextPreset {
#[sqlx(default)]
#[serde(skip_deserializing)]
pub id: i32,
pub channel_id: i32,
pub name: String,
pub text: String,
pub x: String,
pub y: String,
pub fontsize: String,
pub line_spacing: String,
pub fontcolor: String,
pub r#box: String,
pub boxcolor: String,
pub boxborderw: String,
pub alpha: String,
}
#[derive(Debug, Deserialize, Serialize, sqlx::FromRow)]
pub struct Channel {
#[serde(skip_deserializing)]
pub id: i32,
pub name: String,
pub preview_url: String,
pub config_path: String,
pub extra_extensions: String,
pub service: String,
#[sqlx(default)]
#[serde(default)]
pub utc_offset: i32,
}

150
ffplayout-api/src/main.rs Normal file
View File

@ -0,0 +1,150 @@
use std::{path::Path, process::exit};
use actix_files::Files;
use actix_web::{dev::ServiceRequest, middleware, web, App, Error, HttpMessage, HttpServer};
use actix_web_grants::permissions::AttachPermissions;
use actix_web_httpauth::extractors::bearer::BearerAuth;
use actix_web_httpauth::middleware::HttpAuthentication;
use clap::Parser;
use simplelog::*;
pub mod api;
pub mod db;
pub mod utils;
use api::{
auth,
routes::{
add_channel, add_dir, add_preset, add_user, control_playout, del_playlist, delete_preset,
file_browser, gen_playlist, get_all_channels, get_channel, get_log, get_playlist,
get_playout_config, get_presets, get_program, get_user, import_playlist, login,
media_current, media_last, media_next, move_rename, patch_channel, process_control, remove,
remove_channel, save_file, save_playlist, send_text_message, update_playout_config,
update_preset, update_user,
},
};
use db::{db_pool, models::LoginUser};
use utils::{args_parse::Args, control::ProcessControl, db_path, init_config, run_args, Role};
use ffplayout_lib::utils::{init_logging, PlayoutConfig};
async fn validator(req: ServiceRequest, credentials: BearerAuth) -> Result<ServiceRequest, Error> {
// We just get permissions from JWT
let claims = auth::decode_jwt(credentials.token()).await?;
req.attach(vec![Role::set_role(&claims.role)]);
req.extensions_mut()
.insert(LoginUser::new(claims.id, claims.username));
Ok(req)
}
fn public_path() -> &'static str {
if Path::new("/usr/share/ffplayout/public/").is_dir() {
return "/usr/share/ffplayout/public/";
}
if Path::new("./public/").is_dir() {
return "./public/";
}
"./ffplayout-frontend/dist"
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
let args = Args::parse();
let mut config = PlayoutConfig::new(None);
config.mail.recipient = String::new();
config.logging.log_to_file = false;
config.logging.timestamp = false;
let logging = init_logging(&config, None, None);
CombinedLogger::init(logging).unwrap();
if let Err(c) = run_args(args.clone()).await {
exit(c);
}
let pool = match db_pool().await {
Ok(p) => p,
Err(e) => {
error!("{e}");
exit(1);
}
};
if let Some(conn) = args.listen {
if let Ok(p) = db_path() {
if !Path::new(&p).is_file() {
error!("Database is not initialized! Init DB first and add admin user.");
exit(1);
}
}
init_config(&pool).await;
let ip_port = conn.split(':').collect::<Vec<&str>>();
let addr = ip_port[0];
let port = ip_port[1].parse::<u16>().unwrap();
let engine_process = web::Data::new(ProcessControl::new());
info!("running ffplayout API, listen on {conn}");
// no allow origin here, give it to the reverse proxy
HttpServer::new(move || {
let auth = HttpAuthentication::bearer(validator);
let db_pool = web::Data::new(pool.clone());
App::new()
.app_data(db_pool)
.app_data(engine_process.clone())
.wrap(middleware::Logger::default())
.service(login)
.service(
web::scope("/api")
.wrap(auth)
.service(add_user)
.service(get_user)
.service(get_playout_config)
.service(update_playout_config)
.service(add_preset)
.service(get_presets)
.service(update_preset)
.service(delete_preset)
.service(get_channel)
.service(get_all_channels)
.service(patch_channel)
.service(add_channel)
.service(remove_channel)
.service(update_user)
.service(send_text_message)
.service(control_playout)
.service(media_current)
.service(media_next)
.service(media_last)
.service(process_control)
.service(get_playlist)
.service(save_playlist)
.service(gen_playlist)
.service(del_playlist)
.service(get_log)
.service(file_browser)
.service(add_dir)
.service(move_rename)
.service(remove)
.service(save_file)
.service(import_playlist)
.service(get_program),
)
.service(Files::new("/", public_path()).index_file("index.html"))
})
.bind((addr, port))?
.run()
.await
} else {
error!("Run ffpapi with listen parameter!");
Ok(())
}
}

Some files were not shown because too many files have changed in this diff