Compare commits
2 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
45c250ab52 | ||
|
9ebaca5c6b |
@ -1,2 +0,0 @@
|
|||||||
[env]
|
|
||||||
TS_RS_EXPORT_DIR = { value = "frontend/types", relative = true }
|
|
2
.github/FUNDING.yml
vendored
@ -2,4 +2,4 @@
|
|||||||
|
|
||||||
github: [jb-alvarado]
|
github: [jb-alvarado]
|
||||||
custom: PayPal.Me/jonaBaec
|
custom: PayPal.Me/jonaBaec
|
||||||
open_collective: ffplayout
|
#open_collective: ffplayout
|
||||||
|
22
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@ -7,16 +7,11 @@ assignees: ''
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
<!--
|
**Describe the bug**
|
||||||
Note: use this template only when you have a bug to report!
|
|
||||||
-->
|
|
||||||
|
|
||||||
### Describe the bug
|
|
||||||
<!--
|
|
||||||
A clear and concise description of what the bug is.
|
A clear and concise description of what the bug is.
|
||||||
-->
|
|
||||||
|
|
||||||
### To Reproduce
|
**To Reproduce**
|
||||||
|
|
||||||
Steps to reproduce the behavior:
|
Steps to reproduce the behavior:
|
||||||
1. Go to '...'
|
1. Go to '...'
|
||||||
@ -24,23 +19,22 @@ Steps to reproduce the behavior:
|
|||||||
3. Scroll down to '....'
|
3. Scroll down to '....'
|
||||||
4. See error
|
4. See error
|
||||||
|
|
||||||
### Expected behavior
|
**Expected behavior**
|
||||||
<!--
|
|
||||||
A clear and concise description of what you expected to happen.
|
A clear and concise description of what you expected to happen.
|
||||||
-->
|
|
||||||
|
|
||||||
### Desktop/Server/Software (please complete the following information):
|
**Desktop/Server/Software (please complete the following information):**
|
||||||
|
|
||||||
- OS: [e.g. debian 12]
|
- OS: [e.g. debian 12]
|
||||||
- ffplayout version
|
- ffplayout version
|
||||||
- ffmpeg version
|
- ffmpeg version
|
||||||
- are you using the current master of ffplayout?
|
- are you using the current master of ffplayout?
|
||||||
|
|
||||||
### Config Settings:
|
**Config Settings:**
|
||||||
|
|
||||||
- command line arguments
|
- command line arguments
|
||||||
- config fie
|
- config fie
|
||||||
|
|
||||||
### Logging:
|
**Logging:**
|
||||||
|
|
||||||
- content of: ffplayout.log
|
- content of: ffplayout.log
|
||||||
|
26
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@ -1,26 +0,0 @@
|
|||||||
---
|
|
||||||
name: Feature request
|
|
||||||
about: Suggest an idea for this project
|
|
||||||
title: '[Enhancement] <!--FEATURE NAME-->'
|
|
||||||
labels: enhancement
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
<!--
|
|
||||||
Note: use this template only when you have a feature request!
|
|
||||||
-->
|
|
||||||
|
|
||||||
### Feature description
|
|
||||||
<!--
|
|
||||||
A clear and concise description of what the feature should do.
|
|
||||||
-->
|
|
||||||
|
|
||||||
### The problem in the current version
|
|
||||||
<!--
|
|
||||||
What exactly is currently missing?
|
|
||||||
-->
|
|
||||||
|
|
||||||
### Alternative ways
|
|
||||||
<!--
|
|
||||||
What have you already tried to solve this problem?
|
|
||||||
-->
|
|
12
.github/workflows/auto_close_issue.yml
vendored
@ -1,12 +0,0 @@
|
|||||||
name: Autocloser
|
|
||||||
on: [issues]
|
|
||||||
jobs:
|
|
||||||
autoclose:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Autoclose issues that did not follow issue template
|
|
||||||
uses: roots/issue-closer@v1.2
|
|
||||||
with:
|
|
||||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
issue-close-message: "@${issue.user.login} this issue was automatically closed because it did not follow the issue template. Please read [CONTRIBUTING.md](https://github.com/ffplayout/ffplayout/blob/master/CONTRIBUTING.md) for more informations."
|
|
||||||
issue-pattern: ".*### Describe the bug([\\s\\S]*?)### To Reproduce.*|### Feature description.*"
|
|
33
.gitignore
vendored
@ -20,40 +20,9 @@
|
|||||||
*.deb
|
*.deb
|
||||||
*.rpm
|
*.rpm
|
||||||
ffplayout.1.gz
|
ffplayout.1.gz
|
||||||
|
ffpapi.1.gz
|
||||||
/assets/*.db*
|
/assets/*.db*
|
||||||
/dist/
|
/dist/
|
||||||
data/
|
|
||||||
/public/
|
/public/
|
||||||
tmp/
|
tmp/
|
||||||
assets/playlist_template.json
|
assets/playlist_template.json
|
||||||
advanced*.toml
|
|
||||||
ffplayout*.toml
|
|
||||||
template.json
|
|
||||||
|
|
||||||
# frontend stuff
|
|
||||||
node_modules
|
|
||||||
.nuxt
|
|
||||||
.nitro
|
|
||||||
.cache
|
|
||||||
.output
|
|
||||||
.env
|
|
||||||
dist
|
|
||||||
.eslintcache
|
|
||||||
*.tgz
|
|
||||||
.yarn-integrity
|
|
||||||
sw.*
|
|
||||||
.DS_Store
|
|
||||||
*.swp
|
|
||||||
master.m3u8
|
|
||||||
tv-media
|
|
||||||
tv-media/
|
|
||||||
Videos
|
|
||||||
Videos/
|
|
||||||
*.tar*
|
|
||||||
home
|
|
||||||
home/
|
|
||||||
live1
|
|
||||||
live1/
|
|
||||||
Musik
|
|
||||||
Musik/
|
|
||||||
test.vue
|
|
||||||
|
4
.gitmodules
vendored
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
[submodule "ffplayout-frontend"]
|
||||||
|
path = ffplayout-frontend
|
||||||
|
url = https://github.com/ffplayout/ffplayout-frontend.git
|
||||||
|
branch = v0.10.x
|
8
.ignore
@ -1,8 +0,0 @@
|
|||||||
assets/
|
|
||||||
debian/
|
|
||||||
docker/
|
|
||||||
docs/
|
|
||||||
frontend/
|
|
||||||
migrations/
|
|
||||||
scripts/
|
|
||||||
tests/
|
|
6
.vscode/extensions.json
vendored
@ -1,13 +1,7 @@
|
|||||||
{
|
{
|
||||||
"recommendations": [
|
"recommendations": [
|
||||||
"bradlc.vscode-tailwindcss",
|
|
||||||
"dbaeumer.vscode-eslint",
|
|
||||||
"esbenp.prettier-vscode",
|
|
||||||
"hollowtree.vue-snippets",
|
|
||||||
"rust-lang.rust-analyzer",
|
"rust-lang.rust-analyzer",
|
||||||
"statiolake.vscode-rustfmt",
|
"statiolake.vscode-rustfmt",
|
||||||
"tamasfe.even-better-toml",
|
"tamasfe.even-better-toml",
|
||||||
"vue.volar",
|
|
||||||
"wscats.vue",
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
71
.vscode/settings.json
vendored
@ -1,15 +1,4 @@
|
|||||||
{
|
{
|
||||||
"eslint.useFlatConfig": true,
|
|
||||||
"prettier.tabWidth": 4,
|
|
||||||
"prettier.printWidth": 120,
|
|
||||||
"vue3snippets.semi": false,
|
|
||||||
"vue3snippets.singleQuote": true,
|
|
||||||
"vue3snippets.jsxSingleQuote": true,
|
|
||||||
"vue3snippets.printWidth": 120,
|
|
||||||
"vue3snippets.tabWidth": 4,
|
|
||||||
"prettier.jsxSingleQuote": true,
|
|
||||||
"prettier.semi": false,
|
|
||||||
"prettier.singleQuote": true,
|
|
||||||
"rust-analyzer.cargo.target": null,
|
"rust-analyzer.cargo.target": null,
|
||||||
"rust-analyzer.checkOnSave": true,
|
"rust-analyzer.checkOnSave": true,
|
||||||
"rust-analyzer.cargo.buildScripts.overrideCommand": null,
|
"rust-analyzer.cargo.buildScripts.overrideCommand": null,
|
||||||
@ -21,24 +10,6 @@
|
|||||||
"[dockercompose]": {
|
"[dockercompose]": {
|
||||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||||
},
|
},
|
||||||
"[css]": {
|
|
||||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
|
||||||
},
|
|
||||||
"[html]": {
|
|
||||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
|
||||||
},
|
|
||||||
"[javascript]": {
|
|
||||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
|
||||||
},
|
|
||||||
"[scss]": {
|
|
||||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
|
||||||
},
|
|
||||||
"[vue]": {
|
|
||||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
|
||||||
},
|
|
||||||
"[typescript]": {
|
|
||||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
|
||||||
},
|
|
||||||
"[rust]": {
|
"[rust]": {
|
||||||
"editor.formatOnSave": true,
|
"editor.formatOnSave": true,
|
||||||
"editor.defaultFormatter": "statiolake.vscode-rustfmt"
|
"editor.defaultFormatter": "statiolake.vscode-rustfmt"
|
||||||
@ -48,49 +19,9 @@
|
|||||||
},
|
},
|
||||||
"cSpell.words": [
|
"cSpell.words": [
|
||||||
"actix",
|
"actix",
|
||||||
"aevalsrc",
|
|
||||||
"afade",
|
|
||||||
"apad",
|
|
||||||
"boxborderw",
|
|
||||||
"boxcolor",
|
|
||||||
"canonicalize",
|
|
||||||
"cgop",
|
|
||||||
"coeffs",
|
|
||||||
"ffpengine",
|
|
||||||
"flexi",
|
|
||||||
"fontcolor",
|
|
||||||
"fontfile",
|
|
||||||
"fontsize",
|
|
||||||
"httpauth",
|
|
||||||
"ifnot",
|
|
||||||
"keyint",
|
|
||||||
"lettre",
|
|
||||||
"libc",
|
|
||||||
"libx",
|
|
||||||
"libzmq",
|
|
||||||
"maxrate",
|
|
||||||
"minrate",
|
|
||||||
"muxdelay",
|
|
||||||
"muxer",
|
|
||||||
"muxpreload",
|
|
||||||
"n'vtt",
|
|
||||||
"neli",
|
|
||||||
"nuxt",
|
|
||||||
"paris",
|
|
||||||
"Referer",
|
|
||||||
"reqwest",
|
|
||||||
"rsplit",
|
"rsplit",
|
||||||
"RTSP",
|
|
||||||
"rustls",
|
|
||||||
"scenecut",
|
|
||||||
"sqlite",
|
|
||||||
"sqlx",
|
|
||||||
"starttls",
|
"starttls",
|
||||||
"tokio",
|
"tokio",
|
||||||
"tpad",
|
"uuids"
|
||||||
"unistd",
|
|
||||||
"uuids",
|
|
||||||
"webm",
|
|
||||||
"zerolatency"
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
2398
Cargo.lock
generated
@ -1,11 +1,10 @@
|
|||||||
[workspace]
|
[workspace]
|
||||||
members = ["engine", "tests"]
|
members = ["ffplayout-api", "ffplayout-engine", "lib", "tests"]
|
||||||
|
default-members = ["ffplayout-api", "ffplayout-engine", "tests"]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
|
|
||||||
[workspace.package]
|
[workspace.package]
|
||||||
description = "24/7 playout based on rust and ffmpeg"
|
version = "0.23.2"
|
||||||
readme = "README.md"
|
|
||||||
version = "0.24.0"
|
|
||||||
license = "GPL-3.0"
|
license = "GPL-3.0"
|
||||||
repository = "https://github.com/ffplayout/ffplayout"
|
repository = "https://github.com/ffplayout/ffplayout"
|
||||||
authors = ["Jonathan Baecker <jonbae77@gmail.com>"]
|
authors = ["Jonathan Baecker <jonbae77@gmail.com>"]
|
||||||
|
91
README.md
@ -3,7 +3,7 @@
|
|||||||
|
|
||||||
[![License: GPL v3](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0)
|
[![License: GPL v3](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0)
|
||||||
|
|
||||||
![player](/docs/images/player.png)
|
## **ffplayout-engine (ffplayout)**
|
||||||
|
|
||||||
[ffplayout](/ffplayout-engine/README.md) is a 24/7 broadcasting solution. It can playout a folder containing audio or video clips, or play a *JSON* playlist for each day, keeping the current playlist editable.
|
[ffplayout](/ffplayout-engine/README.md) is a 24/7 broadcasting solution. It can playout a folder containing audio or video clips, or play a *JSON* playlist for each day, keeping the current playlist editable.
|
||||||
|
|
||||||
@ -13,13 +13,13 @@ Check the [releases](https://github.com/ffplayout/ffplayout/releases/latest) for
|
|||||||
|
|
||||||
### Features
|
### Features
|
||||||
|
|
||||||
- start program with [web based frontend](/frontend/), or run playout in foreground mode without frontend
|
- have all values in a separate config file
|
||||||
- dynamic playlist
|
- dynamic playlist
|
||||||
- replace missing playlist or clip with single filler or multiple fillers from folder, if no filler exists, create dummy clip
|
- replace missing playlist or clip with single filler or multiple fillers from folder, if no filler exists, create dummy clip
|
||||||
- playing clips in [watched](/docs/folder_mode.md) folder mode
|
- playing clips in [watched](/docs/folder_mode.md) folder mode
|
||||||
- send emails with error message
|
- send emails with error message
|
||||||
- overlay a logo
|
- overlay a logo
|
||||||
- overlay text, controllable through [web frontend](/frontend/) (needs ffmpeg with libzmq and enabled JSON RPC server)
|
- overlay text, controllable through [ffplayout-frontend](https://github.com/ffplayout/ffplayout-frontend) (needs ffmpeg with libzmq and enabled JSON RPC server)
|
||||||
- loop playlist infinitely
|
- loop playlist infinitely
|
||||||
- [remote source](/docs/remote_source.md)
|
- [remote source](/docs/remote_source.md)
|
||||||
- trim and fade the last clip, to get full 24 hours
|
- trim and fade the last clip, to get full 24 hours
|
||||||
@ -42,6 +42,7 @@ Check the [releases](https://github.com/ffplayout/ffplayout/releases/latest) for
|
|||||||
- **desktop**
|
- **desktop**
|
||||||
- **HLS**
|
- **HLS**
|
||||||
- **null** (for debugging)
|
- **null** (for debugging)
|
||||||
|
- JSON RPC server, to get information about what is playing and to control it
|
||||||
- [live ingest](/docs/live_ingest.md)
|
- [live ingest](/docs/live_ingest.md)
|
||||||
- image source (will loop until out duration is reached)
|
- image source (will loop until out duration is reached)
|
||||||
- extra audio source, has priority over audio from video (experimental *)
|
- extra audio source, has priority over audio from video (experimental *)
|
||||||
@ -50,19 +51,23 @@ Check the [releases](https://github.com/ffplayout/ffplayout/releases/latest) for
|
|||||||
- [custom filters](/docs/custom_filters.md) globally in config, or in playlist for specific clips
|
- [custom filters](/docs/custom_filters.md) globally in config, or in playlist for specific clips
|
||||||
- import playlist from text or m3u file, with CLI or frontend
|
- import playlist from text or m3u file, with CLI or frontend
|
||||||
- audio only, for radio mode (experimental *)
|
- audio only, for radio mode (experimental *)
|
||||||
|
- [Piggyback Mode](/ffplayout-api/README.md#piggyback-mode), mostly for non Linux systems (experimental *)
|
||||||
- generate playlist based on [template](/docs/playlist_gen.md) (experimental *)
|
- generate playlist based on [template](/docs/playlist_gen.md) (experimental *)
|
||||||
- During playlist import, all video clips are validated and, if desired, checked to ensure that the audio track is not completely muted.
|
- During playlist import, all video clips are validated and, if desired, checked to ensure that the audio track is not completely muted.
|
||||||
- run multiple channels (experimental *)
|
|
||||||
|
|
||||||
For preview stream, read: [/docs/preview_stream.md](/docs/preview_stream.md)
|
For preview stream, read: [/docs/preview_stream.md](/docs/preview_stream.md)
|
||||||
|
|
||||||
**\* Experimental features do not guarantee the same stability and may fail under unusual circumstances. Code and configuration options may change in the future.**
|
**\* Experimental features do not guarantee the same stability and may fail under unusual circumstances. Code and configuration options may change in the future.**
|
||||||
|
|
||||||
|
## **ffplayout-api (ffpapi)**
|
||||||
|
|
||||||
|
ffpapi serves the [frontend](https://github.com/ffplayout/ffplayout-frontend) and it acts as a [REST API](/ffplayout-api/README.md) for controlling the engine, manipulate playlists, add settings etc.
|
||||||
|
|
||||||
### Requirements
|
### Requirements
|
||||||
|
|
||||||
- RAM and CPU depends on video resolution, minimum 4 _dedicated_ threads and 3GB RAM for 720p are recommend
|
- RAM and CPU depends on video resolution, minimum 4 threads and 3GB RAM for 720p are recommend
|
||||||
- **ffmpeg** v5.0+ and **ffprobe** (**ffplay** if you want to play on desktop)
|
- **ffmpeg** v5.0+ and **ffprobe** (**ffplay** if you want to play on desktop)
|
||||||
- if you want to overlay dynamic text, ffmpeg needs to have **libzmq**
|
- if you want to overlay text, ffmpeg needs to have **libzmq**
|
||||||
|
|
||||||
### Install
|
### Install
|
||||||
|
|
||||||
@ -114,22 +119,80 @@ Check [install](docs/install.md) for details about how to install ffplayout.
|
|||||||
]
|
]
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
If you are in playlist mode and move backwards or forwards in time, the time shift is saved so the playlist is still in sync. Bear in mind, however, that this may make your playlist too short. If you do not reset it, it will automatically reset the next day.
|
|
||||||
|
|
||||||
## **Warning**
|
## **Warning**
|
||||||
|
|
||||||
(Endless) streaming over multiple days will only work if config has a **day_start** value and the **length** value is **24 hours**. If you only need a few hours for each day, use a *cron* job or something similar.
|
(Endless) streaming over multiple days will only work if config has a **day_start** value and the **length** value is **24 hours**. If you only need a few hours for each day, use a *cron* job or something similar.
|
||||||
|
|
||||||
## Note
|
-----
|
||||||
This project includes the DejaVu font, which are licensed under the [Bitstream Vera Fonts License](/assets/FONT_LICENSE.txt).
|
|
||||||
|
## HLS output
|
||||||
|
|
||||||
|
For outputting to HLS, output parameters should look like:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
out:
|
||||||
|
...
|
||||||
|
|
||||||
|
output_param: >-
|
||||||
|
...
|
||||||
|
|
||||||
|
-flags +cgop
|
||||||
|
-f hls
|
||||||
|
-hls_time 6
|
||||||
|
-hls_list_size 600
|
||||||
|
-hls_flags append_list+delete_segments+omit_endlist+program_date_time
|
||||||
|
-hls_segment_filename /var/www/html/live/stream-%09d.ts /var/www/html/live/stream.m3u8
|
||||||
|
```
|
||||||
|
|
||||||
-----
|
-----
|
||||||
|
|
||||||
## Sponsoring
|
## JSON RPC
|
||||||
|
|
||||||
|
The ffplayout engine can run a simple RPC server. A request looks like:
|
||||||
|
|
||||||
|
```Bash
|
||||||
|
curl -X POST -H "Content-Type: application/json" -H "Authorization: ---auth-key---" \
|
||||||
|
-d '{"control":"next"}' \
|
||||||
|
127.0.0.1:7070
|
||||||
|
```
|
||||||
|
|
||||||
|
At the moment this commends are possible:
|
||||||
|
|
||||||
|
```Bash
|
||||||
|
'{"media":"current"}' # get infos about current clip
|
||||||
|
'{"media":"next"}' # get infos about next clip
|
||||||
|
'{"media":"last"}' # get infos about last clip
|
||||||
|
'{"control":"next"}' # jump to next clip
|
||||||
|
'{"control":"back"}' # jump to last clip
|
||||||
|
'{"control":"reset"}' # reset playlist to old state
|
||||||
|
'{"control":"text", \
|
||||||
|
"message": {"text": "Hello from ffplayout", "x": "(w-text_w)/2", "y": "(h-text_h)/2", \
|
||||||
|
"fontsize": 24, "line_spacing": 4, "fontcolor": "#ffffff", "box": 1, \
|
||||||
|
"boxcolor": "#000000", "boxborderw": 4, "alpha": 1.0}}' # send text to drawtext filter from ffmpeg
|
||||||
|
```
|
||||||
|
|
||||||
|
Output from `{"media":"current"}` show:
|
||||||
|
|
||||||
|
```JSON
|
||||||
|
{
|
||||||
|
"media": {
|
||||||
|
"category": "",
|
||||||
|
"duration": 154.2,
|
||||||
|
"out": 154.2,
|
||||||
|
"in": 0.0,
|
||||||
|
"source": "/opt/tv-media/clip.mp4"
|
||||||
|
},
|
||||||
|
"index": 39,
|
||||||
|
"mode": "playlist",
|
||||||
|
"ingest": false,
|
||||||
|
"played": 67.80771999300123,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
If you are in playlist mode and move backwards or forwards in time, the time shift is saved so the playlist is still in sync. Bear in mind, however, that this may make your playlist too short. If you do not reset it, it will automatically reset the next day.
|
||||||
|
|
||||||
|
## Founding
|
||||||
|
|
||||||
If you like this project and would like to make a donation, please use one of the options provided.
|
If you like this project and would like to make a donation, please use one of the options provided.
|
||||||
Please note that donations are not intended to get support or features! Donations are only a sign of appreciation.
|
Please note that donations are not intended to get support or features! Donations are only a sign of appreciation.
|
||||||
|
|
||||||
### Backers
|
|
||||||
|
|
||||||
[![](https://opencollective.com/ffplayout/backers.svg?width=800&button=true)](https://opencollective.com/ffplayout)
|
|
||||||
|
5
assets/11-ffplayout
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
# give user ffpu permission to control the ffplayout systemd service
|
||||||
|
|
||||||
|
ffpu ALL = NOPASSWD: /usr/bin/systemctl start ffplayout.service, /usr/bin/systemctl stop ffplayout.service, /usr/bin/systemctl restart ffplayout.service, /usr/bin/systemctl status ffplayout.service, /usr/bin/systemctl is-active ffplayout.service, /usr/bin/systemctl enable ffplayout.service, /usr/bin/systemctl disable ffplayout.service
|
||||||
|
|
||||||
|
ffpu ALL = NOPASSWD: /usr/bin/systemctl start ffplayout@*, /usr/bin/systemctl stop ffplayout@*, /usr/bin/systemctl restart ffplayout@*, /usr/bin/systemctl status ffplayout@*, /usr/bin/systemctl is-active ffplayout@*, /usr/bin/systemctl enable ffplayout@*, /usr/bin/systemctl disable ffplayout@*
|
@ -1,187 +0,0 @@
|
|||||||
Fonts are (c) Bitstream (see below). DejaVu changes are in public domain.
|
|
||||||
Glyphs imported from Arev fonts are (c) Tavmjong Bah (see below)
|
|
||||||
|
|
||||||
|
|
||||||
Bitstream Vera Fonts Copyright
|
|
||||||
------------------------------
|
|
||||||
|
|
||||||
Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Bitstream Vera is
|
|
||||||
a trademark of Bitstream, Inc.
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of the fonts accompanying this license ("Fonts") and associated
|
|
||||||
documentation files (the "Font Software"), to reproduce and distribute the
|
|
||||||
Font Software, including without limitation the rights to use, copy, merge,
|
|
||||||
publish, distribute, and/or sell copies of the Font Software, and to permit
|
|
||||||
persons to whom the Font Software is furnished to do so, subject to the
|
|
||||||
following conditions:
|
|
||||||
|
|
||||||
The above copyright and trademark notices and this permission notice shall
|
|
||||||
be included in all copies of one or more of the Font Software typefaces.
|
|
||||||
|
|
||||||
The Font Software may be modified, altered, or added to, and in particular
|
|
||||||
the designs of glyphs or characters in the Fonts may be modified and
|
|
||||||
additional glyphs or characters may be added to the Fonts, only if the fonts
|
|
||||||
are renamed to names not containing either the words "Bitstream" or the word
|
|
||||||
"Vera".
|
|
||||||
|
|
||||||
This License becomes null and void to the extent applicable to Fonts or Font
|
|
||||||
Software that has been modified and is distributed under the "Bitstream
|
|
||||||
Vera" names.
|
|
||||||
|
|
||||||
The Font Software may be sold as part of a larger software package but no
|
|
||||||
copy of one or more of the Font Software typefaces may be sold by itself.
|
|
||||||
|
|
||||||
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
|
||||||
OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT,
|
|
||||||
TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL BITSTREAM OR THE GNOME
|
|
||||||
FOUNDATION BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING
|
|
||||||
ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES,
|
|
||||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
|
|
||||||
THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE
|
|
||||||
FONT SOFTWARE.
|
|
||||||
|
|
||||||
Except as contained in this notice, the names of Gnome, the Gnome
|
|
||||||
Foundation, and Bitstream Inc., shall not be used in advertising or
|
|
||||||
otherwise to promote the sale, use or other dealings in this Font Software
|
|
||||||
without prior written authorization from the Gnome Foundation or Bitstream
|
|
||||||
Inc., respectively. For further information, contact: fonts at gnome dot
|
|
||||||
org.
|
|
||||||
|
|
||||||
Arev Fonts Copyright
|
|
||||||
------------------------------
|
|
||||||
|
|
||||||
Copyright (c) 2006 by Tavmjong Bah. All Rights Reserved.
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining
|
|
||||||
a copy of the fonts accompanying this license ("Fonts") and
|
|
||||||
associated documentation files (the "Font Software"), to reproduce
|
|
||||||
and distribute the modifications to the Bitstream Vera Font Software,
|
|
||||||
including without limitation the rights to use, copy, merge, publish,
|
|
||||||
distribute, and/or sell copies of the Font Software, and to permit
|
|
||||||
persons to whom the Font Software is furnished to do so, subject to
|
|
||||||
the following conditions:
|
|
||||||
|
|
||||||
The above copyright and trademark notices and this permission notice
|
|
||||||
shall be included in all copies of one or more of the Font Software
|
|
||||||
typefaces.
|
|
||||||
|
|
||||||
The Font Software may be modified, altered, or added to, and in
|
|
||||||
particular the designs of glyphs or characters in the Fonts may be
|
|
||||||
modified and additional glyphs or characters may be added to the
|
|
||||||
Fonts, only if the fonts are renamed to names not containing either
|
|
||||||
the words "Tavmjong Bah" or the word "Arev".
|
|
||||||
|
|
||||||
This License becomes null and void to the extent applicable to Fonts
|
|
||||||
or Font Software that has been modified and is distributed under the
|
|
||||||
"Tavmjong Bah Arev" names.
|
|
||||||
|
|
||||||
The Font Software may be sold as part of a larger software package but
|
|
||||||
no copy of one or more of the Font Software typefaces may be sold by
|
|
||||||
itself.
|
|
||||||
|
|
||||||
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
|
|
||||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
|
|
||||||
OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL
|
|
||||||
TAVMJONG BAH BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
|
||||||
INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
|
|
||||||
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
|
|
||||||
OTHER DEALINGS IN THE FONT SOFTWARE.
|
|
||||||
|
|
||||||
Except as contained in this notice, the name of Tavmjong Bah shall not
|
|
||||||
be used in advertising or otherwise to promote the sale, use or other
|
|
||||||
dealings in this Font Software without prior written authorization
|
|
||||||
from Tavmjong Bah. For further information, contact: tavmjong @ free
|
|
||||||
. fr.
|
|
||||||
|
|
||||||
TeX Gyre DJV Math
|
|
||||||
-----------------
|
|
||||||
Fonts are (c) Bitstream (see below). DejaVu changes are in public domain.
|
|
||||||
|
|
||||||
Math extensions done by B. Jackowski, P. Strzelczyk and P. Pianowski
|
|
||||||
(on behalf of TeX users groups) are in public domain.
|
|
||||||
|
|
||||||
Letters imported from Euler Fraktur from AMSfonts are (c) American
|
|
||||||
Mathematical Society (see below).
|
|
||||||
Bitstream Vera Fonts Copyright
|
|
||||||
Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Bitstream Vera
|
|
||||||
is a trademark of Bitstream, Inc.
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of the fonts accompanying this license (“Fonts”) and associated
|
|
||||||
documentation
|
|
||||||
files (the “Font Software”), to reproduce and distribute the Font Software,
|
|
||||||
including without limitation the rights to use, copy, merge, publish,
|
|
||||||
distribute,
|
|
||||||
and/or sell copies of the Font Software, and to permit persons to whom
|
|
||||||
the Font Software is furnished to do so, subject to the following
|
|
||||||
conditions:
|
|
||||||
|
|
||||||
The above copyright and trademark notices and this permission notice
|
|
||||||
shall be
|
|
||||||
included in all copies of one or more of the Font Software typefaces.
|
|
||||||
|
|
||||||
The Font Software may be modified, altered, or added to, and in particular
|
|
||||||
the designs of glyphs or characters in the Fonts may be modified and
|
|
||||||
additional
|
|
||||||
glyphs or characters may be added to the Fonts, only if the fonts are
|
|
||||||
renamed
|
|
||||||
to names not containing either the words “Bitstream” or the word “Vera”.
|
|
||||||
|
|
||||||
This License becomes null and void to the extent applicable to Fonts or
|
|
||||||
Font Software
|
|
||||||
that has been modified and is distributed under the “Bitstream Vera”
|
|
||||||
names.
|
|
||||||
|
|
||||||
The Font Software may be sold as part of a larger software package but
|
|
||||||
no copy
|
|
||||||
of one or more of the Font Software typefaces may be sold by itself.
|
|
||||||
|
|
||||||
THE FONT SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
|
||||||
OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT,
|
|
||||||
TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL BITSTREAM OR THE GNOME
|
|
||||||
FOUNDATION
|
|
||||||
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL,
|
|
||||||
SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN
|
|
||||||
ACTION
|
|
||||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR
|
|
||||||
INABILITY TO USE
|
|
||||||
THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.
|
|
||||||
Except as contained in this notice, the names of GNOME, the GNOME
|
|
||||||
Foundation,
|
|
||||||
and Bitstream Inc., shall not be used in advertising or otherwise to promote
|
|
||||||
the sale, use or other dealings in this Font Software without prior written
|
|
||||||
authorization from the GNOME Foundation or Bitstream Inc., respectively.
|
|
||||||
For further information, contact: fonts at gnome dot org.
|
|
||||||
|
|
||||||
AMSFonts (v. 2.2) copyright
|
|
||||||
|
|
||||||
The PostScript Type 1 implementation of the AMSFonts produced by and
|
|
||||||
previously distributed by Blue Sky Research and Y&Y, Inc. are now freely
|
|
||||||
available for general use. This has been accomplished through the
|
|
||||||
cooperation
|
|
||||||
of a consortium of scientific publishers with Blue Sky Research and Y&Y.
|
|
||||||
Members of this consortium include:
|
|
||||||
|
|
||||||
Elsevier Science IBM Corporation Society for Industrial and Applied
|
|
||||||
Mathematics (SIAM) Springer-Verlag American Mathematical Society (AMS)
|
|
||||||
|
|
||||||
In order to assure the authenticity of these fonts, copyright will be
|
|
||||||
held by
|
|
||||||
the American Mathematical Society. This is not meant to restrict in any way
|
|
||||||
the legitimate use of the fonts, such as (but not limited to) electronic
|
|
||||||
distribution of documents containing these fonts, inclusion of these fonts
|
|
||||||
into other public domain or commercial font collections or computer
|
|
||||||
applications, use of the outline data to create derivative fonts and/or
|
|
||||||
faces, etc. However, the AMS does require that the AMS copyright notice be
|
|
||||||
removed from any derivative versions of the fonts which have been altered in
|
|
||||||
any way. In addition, to ensure the fidelity of TeX documents using Computer
|
|
||||||
Modern fonts, Professor Donald Knuth, creator of the Computer Modern faces,
|
|
||||||
has requested that any alterations which yield different font metrics be
|
|
||||||
given a different name.
|
|
||||||
|
|
||||||
$Id$
|
|
37
assets/advanced.toml
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
# Changing these settings is for advanced users only!
|
||||||
|
# There will be no support or guarantee that it will be stable after changing them.
|
||||||
|
|
||||||
|
[decoder]
|
||||||
|
input_param = ""
|
||||||
|
# output_param get also applied to ingest instance.
|
||||||
|
output_param = ""
|
||||||
|
|
||||||
|
[encoder]
|
||||||
|
input_param = ""
|
||||||
|
|
||||||
|
[filters]
|
||||||
|
deinterlace = "" # yadif=0:-1:0
|
||||||
|
pad_scale_w = "" # scale={}:-1
|
||||||
|
pad_scale_h = "" # scale=-1:{}
|
||||||
|
pad_video = "" # pad=max(iw\\,ih*({0}/{1})):ow/({0}/{1}):(ow-iw)/2:(oh-ih)/2
|
||||||
|
fps = "" # fps={}
|
||||||
|
scale = "" # scale={}:{}
|
||||||
|
set_dar = "" # setdar=dar={}
|
||||||
|
fade_in = "" # fade=in:st=0:d=0.5
|
||||||
|
fade_out = "" # fade=out:st={}:d=1.0
|
||||||
|
overlay_logo_scale = "" # scale={}
|
||||||
|
overlay_logo_fade_in = "" # fade=in:st=0:d=1.0:alpha=1
|
||||||
|
overlay_logo_fade_out = "" # fade=out:st={}:d=1.0:alpha=1
|
||||||
|
overlay_logo = "" # null[l];[v][l]overlay={}:shortest=1
|
||||||
|
tpad = "" # tpad=stop_mode=add:stop_duration={}
|
||||||
|
drawtext_from_file = "" # drawtext=text='{}':{}{}
|
||||||
|
drawtext_from_zmq = "" # zmq=b=tcp\\\\://'{}',drawtext@dyntext={}
|
||||||
|
aevalsrc = "" # aevalsrc=0:channel_layout=stereo:duration={}:sample_rate=48000
|
||||||
|
afade_in = "" # afade=in:st=0:d=0.5
|
||||||
|
afade_out = "" # afade=out:st={}:d=1.0
|
||||||
|
apad = "" # apad=whole_dur={}
|
||||||
|
volume = "" # volume={}
|
||||||
|
split = "" # split={}{}
|
||||||
|
|
||||||
|
[ingest]
|
||||||
|
input_param = ""
|
@ -1 +0,0 @@
|
|||||||
WEBVTT
|
|
12
assets/ffpapi.service
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Rest API for ffplayout
|
||||||
|
After=network.target remote-fs.target
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
ExecStart=/usr/bin/ffpapi -l 0.0.0.0:8787
|
||||||
|
Restart=always
|
||||||
|
RestartSec=1
|
||||||
|
User=ffpu
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
@ -3,7 +3,7 @@ Description=Rust and ffmpeg based playout solution
|
|||||||
After=network.target remote-fs.target
|
After=network.target remote-fs.target
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
ExecStart=/usr/bin/ffplayout -l 0.0.0.0:8787
|
ExecStart=/usr/bin/ffplayout
|
||||||
Restart=always
|
Restart=always
|
||||||
StartLimitInterval=20
|
StartLimitInterval=20
|
||||||
RestartSec=1
|
RestartSec=1
|
||||||
|
168
assets/ffplayout.toml
Normal file
@ -0,0 +1,168 @@
|
|||||||
|
[general]
|
||||||
|
help_text = """Sometimes it can happen, that a file is corrupt but still playable, \
|
||||||
|
this can produce an streaming error over all following files. The only way \
|
||||||
|
in this case is, to stop ffplayout and start it again. Here we only say when \
|
||||||
|
it stops, the starting process is in your hand. Best way is a systemd service \
|
||||||
|
on linux.
|
||||||
|
'stop_threshold' stop ffplayout, if it is async in time above this \
|
||||||
|
value. A number below 3 can cause unexpected errors."""
|
||||||
|
stop_threshold = 11
|
||||||
|
stat_file = ".ffp_status"
|
||||||
|
|
||||||
|
[rpc_server]
|
||||||
|
help_text = """Run a JSON RPC server, for getting infos about current playing and for some \
|
||||||
|
control functions."""
|
||||||
|
enable = true
|
||||||
|
address = "127.0.0.1:7070"
|
||||||
|
authorization = "av2Kx8g67lF9qj5wEH3ym1bI4cCs"
|
||||||
|
|
||||||
|
[mail]
|
||||||
|
help_text = """Send error messages to email address, like missing playlist; invalid \
|
||||||
|
json format; missing clip path. Leave recipient blank, if you don't need this.
|
||||||
|
'mail_level' can be INFO, WARNING or ERROR.
|
||||||
|
'interval' means seconds until a new mail will be sended."""
|
||||||
|
subject = "Playout Error"
|
||||||
|
smtp_server = "mail.example.org"
|
||||||
|
starttls = true
|
||||||
|
sender_addr = "ffplayout@example.org"
|
||||||
|
sender_pass = "abc123"
|
||||||
|
recipient = ""
|
||||||
|
mail_level = "ERROR"
|
||||||
|
interval = 30
|
||||||
|
|
||||||
|
[logging]
|
||||||
|
help_text = """If 'log_to_file' is true, log to file, when is false log to console.
|
||||||
|
'backup_count' says how long log files will be saved in days.
|
||||||
|
'local_time' to false will set log timestamps to UTC. Path to /var/log/ only \
|
||||||
|
if you run this program as daemon.
|
||||||
|
'level' can be DEBUG, INFO, WARNING, ERROR.
|
||||||
|
'ffmpeg_level/ingest_level' can be INFO, WARNING, ERROR.
|
||||||
|
'detect_silence' logs an error message if the audio line is silent for 15 \
|
||||||
|
seconds during the validation process.
|
||||||
|
'ignore_lines' makes logging to ignore strings that contains matched lines, \
|
||||||
|
in frontend is a semicolon separated list."""
|
||||||
|
log_to_file = true
|
||||||
|
backup_count = 7
|
||||||
|
local_time = true
|
||||||
|
timestamp = true
|
||||||
|
path = "/var/log/ffplayout/"
|
||||||
|
level = "DEBUG"
|
||||||
|
ffmpeg_level = "ERROR"
|
||||||
|
ingest_level = "WARNING"
|
||||||
|
detect_silence = false
|
||||||
|
ignore_lines = [
|
||||||
|
"P sub_mb_type 4 out of range at",
|
||||||
|
"error while decoding MB",
|
||||||
|
"negative number of zero coeffs at",
|
||||||
|
"out of range intra chroma pred mode",
|
||||||
|
"non-existing SPS 0 referenced in buffering period",
|
||||||
|
]
|
||||||
|
|
||||||
|
[processing]
|
||||||
|
help_text = """Default processing for all clips, to have them unique. Mode can be playlist \
|
||||||
|
or folder.
|
||||||
|
'aspect' must be a float number.'logo' is only used if the path exist.
|
||||||
|
'logo_scale' scale the logo to target size, leave it blank when no scaling \
|
||||||
|
is needed, format is 'width:height', for example '100:-1' for proportional \
|
||||||
|
scaling. With 'logo_opacity' logo can become transparent.
|
||||||
|
With 'audio_tracks' it is possible to configure how many audio tracks should \
|
||||||
|
be processed. 'audio_channels' can be use, if audio has more channels then only stereo.
|
||||||
|
With 'logo_position' in format 'x:y' you set the logo position.
|
||||||
|
With 'custom_filter' it is possible, to apply further filters. The filter \
|
||||||
|
outputs should end with [c_v_out] for video filter, and [c_a_out] for audio filter."""
|
||||||
|
mode = "playlist"
|
||||||
|
audio_only = false
|
||||||
|
copy_audio = false
|
||||||
|
copy_video = false
|
||||||
|
width = 1024
|
||||||
|
height = 576
|
||||||
|
aspect = 1.778
|
||||||
|
fps = 25
|
||||||
|
add_logo = true
|
||||||
|
logo = "/usr/share/ffplayout/logo.png"
|
||||||
|
logo_scale = ""
|
||||||
|
logo_opacity = 0.7
|
||||||
|
logo_position = "W-w-12:12"
|
||||||
|
audio_tracks = 1
|
||||||
|
audio_track_index = -1
|
||||||
|
audio_channels = 2
|
||||||
|
volume = 1
|
||||||
|
custom_filter = ""
|
||||||
|
|
||||||
|
[ingest]
|
||||||
|
help_text = """Run a server for a ingest stream. This stream will override the normal streaming \
|
||||||
|
until is done. There is only a very simple authentication mechanism, which check if the \
|
||||||
|
stream name is correct.
|
||||||
|
'custom_filter' can be used in the same way then the one in the process section."""
|
||||||
|
enable = false
|
||||||
|
input_param = "-f live_flv -listen 1 -i rtmp://127.0.0.1:1936/live/stream"
|
||||||
|
custom_filter = ""
|
||||||
|
|
||||||
|
[playlist]
|
||||||
|
help_text = """'path' can be a path to a single file, or a directory. For directory put \
|
||||||
|
only the root folder, for example '/playlists', subdirectories are read by the \
|
||||||
|
program. Subdirectories needs this structure '/playlists/2018/01'.
|
||||||
|
'day_start' means at which time the playlist should start, leave day_start \
|
||||||
|
blank when playlist should always start at the begin. 'length' represent the \
|
||||||
|
target length from playlist, when is blank real length will not consider.
|
||||||
|
'infinit: true' works with single playlist file and loops it infinitely. """
|
||||||
|
path = "/var/lib/ffplayout/playlists"
|
||||||
|
day_start = "05:59:25"
|
||||||
|
length = "24:00:00"
|
||||||
|
infinit = false
|
||||||
|
|
||||||
|
[storage]
|
||||||
|
help_text = """'filler' is for playing instead of a missing file or fill the end to reach 24 \
|
||||||
|
hours, can be a file or folder, it will loop when is necessary.
|
||||||
|
'extensions' search only files with this extension. Set 'shuffle' to 'true' \
|
||||||
|
to pick files randomly."""
|
||||||
|
path = "/var/lib/ffplayout/tv-media"
|
||||||
|
filler = "/var/lib/ffplayout/tv-media/filler/filler.mp4"
|
||||||
|
extensions = ["mp4", "mkv", "webm"]
|
||||||
|
shuffle = true
|
||||||
|
|
||||||
|
[text]
|
||||||
|
help_text = """Overlay text in combination with libzmq for remote text manipulation. \
|
||||||
|
On windows fontfile path need to be like this 'C\\:/WINDOWS/fonts/DejaVuSans.ttf'.
|
||||||
|
'text_from_filename' activate the extraction from text of a filename. With 'style' \
|
||||||
|
you can define the drawtext parameters like position, color, etc. Post Text over \
|
||||||
|
API will override this. With 'regex' you can format file names, to get a title from it."""
|
||||||
|
add_text = true
|
||||||
|
text_from_filename = false
|
||||||
|
fontfile = "/usr/share/fonts/truetype/dejavu/DejaVuSans.ttf"
|
||||||
|
style = "x=(w-tw)/2:y=(h-line_h)*0.9:fontsize=24:fontcolor=#ffffff:box=1:boxcolor=#000000:boxborderw=4"
|
||||||
|
regex = "^.+[/\\](.*)(.mp4|.mkv|.webm)$"
|
||||||
|
|
||||||
|
[task]
|
||||||
|
help_text = """Run an external program with a given media object. The media object is in json format \
|
||||||
|
and contains all the information about the current clip. The external program can be a script \
|
||||||
|
or a binary, but should only run for a short time."""
|
||||||
|
enable = false
|
||||||
|
path = ""
|
||||||
|
|
||||||
|
[out]
|
||||||
|
help_text = """The final playout compression. Set the settings to your needs. 'mode' \
|
||||||
|
has the options 'desktop', 'hls', 'null', 'stream'. Use 'stream' and adjust \
|
||||||
|
'output_param:' settings when you want to stream to a rtmp/rtsp/srt/... server.
|
||||||
|
In production don't serve hls playlist with ffpapi, use nginx or another web server!"""
|
||||||
|
mode = "hls"
|
||||||
|
output_param = """\
|
||||||
|
-c:v libx264
|
||||||
|
-crf 23
|
||||||
|
-x264-params keyint=50:min-keyint=25:scenecut=-1
|
||||||
|
-maxrate 1300k
|
||||||
|
-bufsize 2600k
|
||||||
|
-preset faster
|
||||||
|
-tune zerolatency
|
||||||
|
-profile:v Main
|
||||||
|
-level 3.1
|
||||||
|
-c:a aac
|
||||||
|
-ar 44100
|
||||||
|
-b:a 128k
|
||||||
|
-flags +cgop
|
||||||
|
-f hls
|
||||||
|
-hls_time 6
|
||||||
|
-hls_list_size 600
|
||||||
|
-hls_flags append_list+delete_segments+omit_endlist
|
||||||
|
-hls_segment_filename /usr/share/ffplayout/public/live/stream-%d.ts
|
||||||
|
/usr/share/ffplayout/public/live/stream.m3u8"""
|
14
assets/ffplayout@.service
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Rust and ffmpeg based multi channel playout solution
|
||||||
|
After=network.target remote-fs.target
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
ExecStart=/usr/bin/ffplayout %I
|
||||||
|
Restart=always
|
||||||
|
StartLimitInterval=20
|
||||||
|
RestartSec=1
|
||||||
|
KillMode=mixed
|
||||||
|
User=ffpu
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
5
debian/postinst
vendored
@ -17,8 +17,13 @@ if [ ! -d "/usr/share/ffplayout/db" ]; then
|
|||||||
mkdir -p "/var/lib/ffplayout/playlists"
|
mkdir -p "/var/lib/ffplayout/playlists"
|
||||||
mkdir -p "/var/lib/ffplayout/tv-media"
|
mkdir -p "/var/lib/ffplayout/tv-media"
|
||||||
|
|
||||||
|
IP=$(hostname -I | cut -d ' ' -f1)
|
||||||
|
|
||||||
|
/usr/bin/ffpapi -i -d "${IP}:8787"
|
||||||
|
|
||||||
chown -R ${sysUser}: "/usr/share/ffplayout"
|
chown -R ${sysUser}: "/usr/share/ffplayout"
|
||||||
chown -R ${sysUser}: "/var/lib/ffplayout"
|
chown -R ${sysUser}: "/var/lib/ffplayout"
|
||||||
|
chown -R ${sysUser}: "/etc/ffplayout"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ ! -d "/var/log/ffplayout" ]; then
|
if [ ! -d "/var/log/ffplayout" ]; then
|
||||||
|
2
debian/postrm
vendored
@ -6,7 +6,7 @@ sysUser="ffpu"
|
|||||||
case "$1" in
|
case "$1" in
|
||||||
abort-install|purge)
|
abort-install|purge)
|
||||||
deluser $sysUser
|
deluser $sysUser
|
||||||
rm -rf /usr/share/ffplayout /var/log/ffplayout /var/lib/ffplayout /home/$sysUser
|
rm -rf /usr/share/ffplayout /var/log/ffplayout /etc/ffplayout /var/lib/ffplayout /home/$sysUser
|
||||||
;;
|
;;
|
||||||
|
|
||||||
remove)
|
remove)
|
||||||
|
@ -1,38 +1,45 @@
|
|||||||
FROM alpine:latest
|
FROM almalinux:9 AS base
|
||||||
|
|
||||||
ARG FFPLAYOUT_VERSION=0.24.0-rc3
|
ENV container docker
|
||||||
ARG SHARED_STORAGE=false
|
|
||||||
|
|
||||||
ENV DB=/db
|
RUN (cd /lib/systemd/system/sysinit.target.wants/; for i in *; do [ $i == \
|
||||||
ENV SHARED_STORAGE=${SHARED_STORAGE}
|
systemd-tmpfiles-setup.service ] || rm -f $i; done); \
|
||||||
|
rm -f /lib/systemd/system/multi-user.target.wants/*; \
|
||||||
|
rm -f /etc/systemd/system/*.wants/*; \
|
||||||
|
rm -f /lib/systemd/system/local-fs.target.wants/*; \
|
||||||
|
rm -f /lib/systemd/system/sockets.target.wants/*udev*; \
|
||||||
|
rm -f /lib/systemd/system/sockets.target.wants/*initctl*; \
|
||||||
|
rm -f /lib/systemd/system/basic.target.wants/*; \
|
||||||
|
rm -f /lib/systemd/system/anaconda.target.wants/*
|
||||||
|
|
||||||
COPY README.md ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.* /tmp/
|
FROM base
|
||||||
|
|
||||||
COPY <<-EOT /run.sh
|
ARG FFPLAYOUT_VERSION=0.22.0
|
||||||
#!/bin/sh
|
COPY README.md *.rpm /tmp/
|
||||||
|
|
||||||
if [ ! -f /db/ffplayout.db ]; then
|
RUN dnf update -y && \
|
||||||
ffplayout -i -u admin -p admin -m contact@example.com --storage "/tv-media" --playlists "/playlists" --public "/public" --logs "/logging" --mail-smtp "mail.example.org" --mail-user "admin@example.org" --mail-password "" --mail-starttls
|
dnf install -y epel-release && \
|
||||||
fi
|
dnf install -y 'dnf-command(config-manager)' && \
|
||||||
|
dnf config-manager --set-enabled crb && \
|
||||||
|
dnf install -y --nogpgcheck https://mirrors.rpmfusion.org/free/el/rpmfusion-free-release-$(rpm -E %rhel).noarch.rpm && \
|
||||||
|
dnf install -y --nogpgcheck https://mirrors.rpmfusion.org/nonfree/el/rpmfusion-nonfree-release-$(rpm -E %rhel).noarch.rpm && \
|
||||||
|
dnf install -y ffmpeg wget dejavu-sans-fonts sudo && \
|
||||||
|
dnf clean all
|
||||||
|
|
||||||
/usr/bin/ffplayout -l "0.0.0.0:8787"
|
RUN [[ -f /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm ]] || wget -q "https://github.com/ffplayout/ffplayout/releases/download/v${FFPLAYOUT_VERSION}/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm" -P /tmp/ && \
|
||||||
EOT
|
dnf install -y /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm && \
|
||||||
|
rm /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm && \
|
||||||
RUN apk update && \
|
sed -i "s/User=ffpu/User=root/g" /usr/lib/systemd/system/ffpapi.service && \
|
||||||
apk upgrade && \
|
sed -i "s/User=ffpu/User=root/g" /usr/lib/systemd/system/ffplayout.service && \
|
||||||
apk add --no-cache ffmpeg sqlite font-dejavu && \
|
sed -i "s/User=ffpu/User=root/g" /usr/lib/systemd/system/ffplayout@.service && \
|
||||||
chmod +x /run.sh
|
systemctl enable ffplayout && \
|
||||||
|
systemctl enable ffpapi && \
|
||||||
RUN [[ -f "/tmp/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" ]] || \
|
ffpapi -u admin -p admin -m contact@example.com
|
||||||
wget -q "https://github.com/ffplayout/ffplayout/releases/download/v${FFPLAYOUT_VERSION}/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" -P /tmp/ && \
|
|
||||||
cd /tmp && \
|
|
||||||
tar xf "ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" && \
|
|
||||||
cp ffplayout /usr/bin/ && \
|
|
||||||
mkdir -p /usr/share/ffplayout/ && \
|
|
||||||
cp assets/dummy.vtt assets/logo.png assets/DejaVuSans.ttf assets/FONT_LICENSE.txt /usr/share/ffplayout/ && \
|
|
||||||
rm -rf /tmp/* && \
|
|
||||||
mkdir ${DB}
|
|
||||||
|
|
||||||
EXPOSE 8787
|
EXPOSE 8787
|
||||||
|
|
||||||
CMD ["/run.sh"]
|
# Maybe on some systems is needed, combined with run parameters: --tmpfs /tmp --tmpfs /run --tmpfs /run/lock
|
||||||
|
# More infos: https://serverfault.com/a/1087467/387878
|
||||||
|
#VOLUME [ "/tmp", "/run", "/run/lock" ]
|
||||||
|
|
||||||
|
CMD ["/usr/sbin/init"]
|
||||||
|
@ -1,5 +1,22 @@
|
|||||||
# Run ffplayout in container
|
# Run ffplayout in container
|
||||||
|
|
||||||
|
|
||||||
|
## Base Image
|
||||||
|
|
||||||
|
Use of [CentOS image](https://hub.docker.com/_/centos) as base image as it offer the possibility to use systemd.
|
||||||
|
In order to run systemd in a container it has to run in privileged mode and bind to the `cgroup` of the host.
|
||||||
|
|
||||||
|
> **_NOTE:_** A system with CGroup V2 is need!
|
||||||
|
> Currently tested host systems are:
|
||||||
|
> - debian 12 with official docker ce from docker.com
|
||||||
|
> - Manjaro from 2024 Kernel 6.6+
|
||||||
|
> - fedora 39 with podman
|
||||||
|
|
||||||
|
## Image
|
||||||
|
|
||||||
|
In addition to the base image, there is the compilation of ffmpeg and all lib from source based on https://github.com/jrottenberg/ffmpeg.
|
||||||
|
We can't use directly the image from `jrottenberg/ffmpeg` as it compile ffmpeg with the flag `--enable-small` that remove some part of the json from the ffprobe command.
|
||||||
|
|
||||||
The image is build with a default user/pass `admin/admin`.
|
The image is build with a default user/pass `admin/admin`.
|
||||||
|
|
||||||
You can take a look at the [Dockerfile](Dockerfile)
|
You can take a look at the [Dockerfile](Dockerfile)
|
||||||
@ -9,9 +26,10 @@ You can take a look at the [Dockerfile](Dockerfile)
|
|||||||
## Storage
|
## Storage
|
||||||
|
|
||||||
There are some folders/files that are important for ffplayout to work well such as:
|
There are some folders/files that are important for ffplayout to work well such as:
|
||||||
- **/usr/share/ffplayout/db** => where all the data are stored (user/pass etc)
|
- **/usr/share/ffplayout/db** => where all the data for the `ffpapi` are stored (user/pass etc)
|
||||||
- **/var/lib/ffplayout/tv-media** => where the media are stored by default (configurable)
|
- **/var/lib/ffplayout/tv-media** => where the media are stored by default (configurable)
|
||||||
- **/var/lib/ffplayout/playlists** => where playlists are stored (configurable)
|
- **/var/lib/ffplayout/playlists** => where playlists are stored (configurable)
|
||||||
|
- **/etc/ffplayout/ffplayout.yml** => the core config file
|
||||||
|
|
||||||
It may be useful to create/link volume for those folders/files.
|
It may be useful to create/link volume for those folders/files.
|
||||||
|
|
||||||
@ -22,37 +40,28 @@ How to build the image:\
|
|||||||
# build default
|
# build default
|
||||||
docker build -t ffplayout-image .
|
docker build -t ffplayout-image .
|
||||||
|
|
||||||
# build from root folder, to copy *.tar.gz with self compiled binary
|
# build from root folder, to copy local *.rpm package
|
||||||
docker build -f docker/Dockerfile -t ffplayout-image .
|
docker build -f docker/Dockerfile -t ffplayout-image:alma .
|
||||||
|
|
||||||
# build ffmpeg from source
|
# build ffmpeg from source
|
||||||
docker build -f ffmpeg.Dockerfile -t ffmpeg-build .
|
docker build -f fromSource.Dockerfile -t ffplayout-image:from-source .
|
||||||
docker build -f nonfree.Dockerfile -t ffplayout-image:nonfree .
|
|
||||||
|
|
||||||
# build with nvidia image for hardware support
|
# build with nvidia image for hardware support
|
||||||
docker build -f nvidia.Dockerfile -t ffplayout-image:nvidia .
|
docker build -f nvidia-centos7.Dockerfile -t ffplayout-image:nvidia .
|
||||||
```
|
```
|
||||||
|
|
||||||
example of command to start the container:
|
example of command to start the container:
|
||||||
|
|
||||||
```BASH
|
```BASH
|
||||||
docker run -it -v /path/to/db:/db -v /path/to/storage:/tv-media -v /path/to/playlists:/playlists -v /path/to/public:/public -v /path/to/logging:/logging --name ffplayout -p 8787:8787 ffplayout-image
|
docker run -it --name ffplayout --privileged -p 8787:8787 ffplayout-image
|
||||||
|
|
||||||
# run in daemon mode
|
# run in daemon mode
|
||||||
docker run -d --name ffplayout -p 8787:8787 ffplayout-image
|
docker run -d --name ffplayout --privileged -p 8787:8787 ffplayout-image
|
||||||
|
|
||||||
# run with docker-compose
|
# run with docker-compose
|
||||||
docker-compose up -d
|
docker-compose up -d
|
||||||
```
|
```
|
||||||
|
|
||||||
For setup mail server settings run:
|
|
||||||
|
|
||||||
```
|
|
||||||
docker exec -it ffplayout ffplayout -i
|
|
||||||
```
|
|
||||||
|
|
||||||
Then restart Container
|
|
||||||
|
|
||||||
#### Note from CentOS docker hub page
|
#### Note from CentOS docker hub page
|
||||||
There have been reports that if you're using an Ubuntu host, you will need to add `-v /tmp/$(mktemp -d):/run` to the mount.
|
There have been reports that if you're using an Ubuntu host, you will need to add `-v /tmp/$(mktemp -d):/run` to the mount.
|
||||||
|
|
||||||
|
@ -2,15 +2,12 @@ version: '3'
|
|||||||
|
|
||||||
services:
|
services:
|
||||||
ffplayout:
|
ffplayout:
|
||||||
|
cap_add:
|
||||||
|
- SYS_ADMIN
|
||||||
container_name: ffplayout
|
container_name: ffplayout
|
||||||
|
privileged: true
|
||||||
build:
|
build:
|
||||||
context: .
|
context: .
|
||||||
dockerfile: ./Dockerfile
|
dockerfile: ./Dockerfile
|
||||||
volumes:
|
|
||||||
- ./data/db:/db
|
|
||||||
- ./data/storage:/tv-media
|
|
||||||
- ./data/playlists:/playlists
|
|
||||||
- ./data/logging:/logging
|
|
||||||
- ./data/public:/public
|
|
||||||
ports:
|
ports:
|
||||||
- '8787:8787'
|
- '8787:8787'
|
||||||
|
@ -1,158 +0,0 @@
|
|||||||
FROM alpine:latest as builder
|
|
||||||
|
|
||||||
ENV EXTRA_CFLAGS=-march=generic \
|
|
||||||
LOCALBUILDDIR=/tmp/build \
|
|
||||||
LOCALDESTDIR=/tmp/local \
|
|
||||||
PKG_CONFIG="pkg-config --static" \
|
|
||||||
PKG_CONFIG_PATH=/tmp/local/lib/pkgconfig \
|
|
||||||
CPPFLAGS="-I/tmp/local/include -O3 -fno-strict-overflow -fstack-protector-all -fPIC" \
|
|
||||||
CFLAGS="-I/tmp/local/include -O3 -fno-strict-overflow -fstack-protector-all -fPIC" \
|
|
||||||
CXXFLAGS="-I/tmp/local/include -O2 -fPIC" \
|
|
||||||
LDFLAGS="-L/tmp/local/lib -pipe -Wl,-z,relro,-z,now -static" \
|
|
||||||
CC=clang
|
|
||||||
|
|
||||||
RUN apk add --no-cache \
|
|
||||||
clang \
|
|
||||||
glib-dev glib-static \
|
|
||||||
coreutils \
|
|
||||||
autoconf \
|
|
||||||
automake \
|
|
||||||
build-base \
|
|
||||||
cmake \
|
|
||||||
git \
|
|
||||||
libtool \
|
|
||||||
nasm \
|
|
||||||
pkgconfig \
|
|
||||||
yasm \
|
|
||||||
wget \
|
|
||||||
curl \
|
|
||||||
ninja-build \
|
|
||||||
meson \
|
|
||||||
cargo cargo-c \
|
|
||||||
diffutils \
|
|
||||||
bash
|
|
||||||
|
|
||||||
RUN apk add --no-cache \
|
|
||||||
zlib-dev zlib-static \
|
|
||||||
bzip2-dev bzip2-static \
|
|
||||||
expat-dev expat-static \
|
|
||||||
libxml2-dev libxml2-static \
|
|
||||||
fontconfig-dev fontconfig-static \
|
|
||||||
freetype freetype-dev freetype-static \
|
|
||||||
fribidi-dev fribidi-static \
|
|
||||||
harfbuzz-dev harfbuzz-static \
|
|
||||||
graphite2-static \
|
|
||||||
numactl-dev \
|
|
||||||
brotli-dev brotli-static \
|
|
||||||
soxr-dev soxr-static \
|
|
||||||
libjpeg-turbo libjpeg-turbo-dev \
|
|
||||||
libpng-dev libpng-static \
|
|
||||||
xvidcore-dev xvidcore-static \
|
|
||||||
libsodium-dev libsodium-static \
|
|
||||||
zeromq-dev libzmq-static \
|
|
||||||
openssl-dev openssl-libs-static
|
|
||||||
|
|
||||||
WORKDIR /tmp
|
|
||||||
RUN git clone --depth 1 "https://github.com/libass/libass.git" && cd libass && \
|
|
||||||
./autogen.sh && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone --depth 1 "https://github.com/mstorsjo/fdk-aac" && cd fdk-aac && \
|
|
||||||
./autogen.sh && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "lame-3.100.tar.gz" "https://downloads.sourceforge.net/project/lame/lame/3.100/lame-3.100.tar.gz" && \
|
|
||||||
tar xf "lame-3.100.tar.gz" && \
|
|
||||||
cd "lame-3.100" && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --enable-expopt=full --enable-shared=no && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "opus-1.4.tar.gz" "https://ftp.osuosl.org/pub/xiph/releases/opus/opus-1.4.tar.gz" && \
|
|
||||||
tar xf "opus-1.4.tar.gz" && \
|
|
||||||
cd "opus-1.4" && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --enable-shared=no --enable-static --disable-doc && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone --depth 1 "https://github.com/Haivision/srt.git" && cd srt && \
|
|
||||||
mkdir build && \
|
|
||||||
cd build && \
|
|
||||||
cmake .. -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DOPENSSL_USE_STATIC_LIBS=ON -DUSE_STATIC_LIBSTDCXX:BOOLEAN=ON -DENABLE_CXX11:BOOLEAN=ON -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone "https://github.com/webmproject/libvpx.git" && cd libvpx && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --disable-shared --enable-static --disable-unit-tests --disable-docs --enable-postproc --enable-vp9-postproc --enable-runtime-cpu-detect && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone "https://code.videolan.org/videolan/x264" && cd x264 && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --enable-static && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone "https://bitbucket.org/multicoreware/x265_git.git" && cd x265_git/build && \
|
|
||||||
cmake ../source -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DCMAKE_CXX_FLAGS_RELEASE:STRING="-O3 -DNDEBUG $CXXFLAGS" && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone "https://github.com/xiph/rav1e.git" && cd rav1e && \
|
|
||||||
RUSTFLAGS="-C target-feature=+crt-static" cargo cinstall --release --jobs $(nproc) --prefix=$LOCALDESTDIR --libdir=$LOCALDESTDIR/lib --includedir=$LOCALDESTDIR/include
|
|
||||||
|
|
||||||
RUN git clone --depth 1 "https://gitlab.com/AOMediaCodec/SVT-AV1.git" && cd SVT-AV1/Build && \
|
|
||||||
cmake .. -G"Unix Makefiles" -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone --depth 1 "https://code.videolan.org/videolan/dav1d.git" && cd dav1d && \
|
|
||||||
mkdir build && cd build && \
|
|
||||||
meson setup -Denable_tools=false -Denable_tests=false --default-library=static .. --prefix "$LOCALDESTDIR" --libdir="$LOCALDESTDIR/lib" && \
|
|
||||||
ninja && \
|
|
||||||
ninja install
|
|
||||||
|
|
||||||
RUN git clone --depth 1 https://git.ffmpeg.org/ffmpeg.git && cd ffmpeg && \
|
|
||||||
sed -i 's/add_ldexeflags -fPIE -pie/add_ldexeflags -fPIE -static-pie/' configure && \
|
|
||||||
./configure \
|
|
||||||
--pkg-config-flags=--static \
|
|
||||||
--extra-cflags="-fopenmp -DZMG_STATIC" \
|
|
||||||
--extra-ldflags="-fopenmp -Wl,--copy-dt-needed-entries -Wl,--allow-multiple-definition" \
|
|
||||||
--enable-runtime-cpudetect \
|
|
||||||
--prefix=/usr/local \
|
|
||||||
--disable-debug \
|
|
||||||
--disable-doc \
|
|
||||||
--disable-ffplay \
|
|
||||||
--disable-shared \
|
|
||||||
--enable-gpl \
|
|
||||||
--enable-version3 \
|
|
||||||
--enable-nonfree \
|
|
||||||
--enable-small \
|
|
||||||
--enable-static \
|
|
||||||
--enable-libass \
|
|
||||||
--enable-fontconfig \
|
|
||||||
--enable-libfdk-aac \
|
|
||||||
--enable-libfribidi \
|
|
||||||
--enable-libfreetype \
|
|
||||||
--enable-libharfbuzz \
|
|
||||||
--enable-libmp3lame \
|
|
||||||
--enable-libopus \
|
|
||||||
--enable-libsoxr \
|
|
||||||
--enable-libsrt \
|
|
||||||
--enable-libvpx \
|
|
||||||
--enable-libx264 \
|
|
||||||
--enable-libx265 \
|
|
||||||
--enable-libzmq \
|
|
||||||
--enable-nonfree \
|
|
||||||
--enable-openssl \
|
|
||||||
--enable-libsvtav1 \
|
|
||||||
--enable-librav1e \
|
|
||||||
--enable-libdav1d \
|
|
||||||
--enable-libxvid && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN strip /usr/local/bin/ffmpeg /usr/local/bin/ffprobe
|
|
163
docker/fromSource.Dockerfile
Normal file
@ -0,0 +1,163 @@
|
|||||||
|
FROM almalinux:9 AS base
|
||||||
|
|
||||||
|
ENV container docker
|
||||||
|
|
||||||
|
RUN (cd /lib/systemd/system/sysinit.target.wants/; for i in *; do [ $i == \
|
||||||
|
systemd-tmpfiles-setup.service ] || rm -f $i; done); \
|
||||||
|
rm -f /lib/systemd/system/multi-user.target.wants/*; \
|
||||||
|
rm -f /etc/systemd/system/*.wants/*; \
|
||||||
|
rm -f /lib/systemd/system/local-fs.target.wants/*; \
|
||||||
|
rm -f /lib/systemd/system/sockets.target.wants/*udev*; \
|
||||||
|
rm -f /lib/systemd/system/sockets.target.wants/*initctl*; \
|
||||||
|
rm -f /lib/systemd/system/basic.target.wants/*; \
|
||||||
|
rm -f /lib/systemd/system/anaconda.target.wants/*
|
||||||
|
|
||||||
|
FROM base AS build
|
||||||
|
|
||||||
|
WORKDIR /tmp/workdir
|
||||||
|
|
||||||
|
ENV SRC=/usr/local \
|
||||||
|
BUILD=/tmp/build
|
||||||
|
|
||||||
|
ARG LD_LIBRARY_PATH=/opt/ffmpeg/lib
|
||||||
|
ARG PKG_CONFIG_PATH="/opt/ffmpeg/share/pkgconfig:/opt/ffmpeg/lib/pkgconfig:/opt/ffmpeg/lib64/pkgconfig:/lib64/pkgconfig"
|
||||||
|
ARG LOCALDESTDIR=/opt/ffmpeg
|
||||||
|
ARG LD_LIBRARY_PATH="/opt/ffmpeg/lib:/opt/ffmpeg/lib64"
|
||||||
|
|
||||||
|
RUN \
|
||||||
|
buildDeps="bzip2 gperf which libticonv autoconf automake cmake diffutils file gcc \
|
||||||
|
ninja-build wget nasm gcc-c++ git libtool make perl yasm meson x264-devel zlib-devel \
|
||||||
|
expat-devel fontconfig-devel libxml2-devel lame-devel libpng-devel numactl-devel \
|
||||||
|
fribidi-devel zeromq-devel freetype-devel opus-devel libass-devel openssl-devel" && \
|
||||||
|
echo "${SRC}/lib" > /etc/ld.so.conf.d/libc.conf && \
|
||||||
|
dnf install -y epel-release && \
|
||||||
|
dnf install -y 'dnf-command(config-manager)' && \
|
||||||
|
dnf config-manager --set-enabled crb && \
|
||||||
|
dnf install -y --nogpgcheck https://mirrors.rpmfusion.org/free/el/rpmfusion-free-release-$(rpm -E %rhel).noarch.rpm && \
|
||||||
|
dnf install -y --nogpgcheck https://mirrors.rpmfusion.org/nonfree/el/rpmfusion-nonfree-release-$(rpm -E %rhel).noarch.rpm && \
|
||||||
|
dnf install -y ${buildDeps} && \
|
||||||
|
mkdir -p ${BUILD}
|
||||||
|
|
||||||
|
RUN \
|
||||||
|
cd ${BUILD} && \
|
||||||
|
git clone --depth 1 "https://github.com/Haivision/srt.git" && \
|
||||||
|
cd srt && \
|
||||||
|
mkdir build && \
|
||||||
|
cd build && \
|
||||||
|
cmake .. -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DUSE_STATIC_LIBSTDCXX:BOOLEAN=ON \
|
||||||
|
-DENABLE_CXX11:BOOLEAN=OFF -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" && \
|
||||||
|
make -j $(nproc | awk '{print $1 / 2}') && \
|
||||||
|
make install
|
||||||
|
|
||||||
|
RUN \
|
||||||
|
cd ${BUILD} && \
|
||||||
|
git clone --depth 1 "https://code.videolan.org/rist/librist.git" && \
|
||||||
|
cd librist && \
|
||||||
|
mkdir build && \
|
||||||
|
cd build && \
|
||||||
|
meson setup --default-library=static --prefix "$LOCALDESTDIR" --libdir="$LOCALDESTDIR/lib" .. && \
|
||||||
|
ninja && \
|
||||||
|
ninja install
|
||||||
|
|
||||||
|
RUN \
|
||||||
|
cd ${BUILD} && \
|
||||||
|
git clone --depth 1 "https://github.com/mstorsjo/fdk-aac" && \
|
||||||
|
cd fdk-aac && \
|
||||||
|
./autogen.sh && \
|
||||||
|
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
|
||||||
|
make -j $(nproc | awk '{print $1 / 2}') && \
|
||||||
|
make install
|
||||||
|
|
||||||
|
RUN \
|
||||||
|
cd ${BUILD} && \
|
||||||
|
git clone --depth 1 "https://gitlab.com/AOMediaCodec/SVT-AV1.git" && \
|
||||||
|
cd SVT-AV1/Build && \
|
||||||
|
rm -rf * && \
|
||||||
|
cmake .. -G"Unix Makefiles" -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DCMAKE_BUILD_TYPE=Release \
|
||||||
|
-DBUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" && \
|
||||||
|
make -j $(nproc | awk '{print $1 / 2}') && \
|
||||||
|
make install
|
||||||
|
|
||||||
|
RUN \
|
||||||
|
cd ${BUILD} && \
|
||||||
|
git clone --depth 1 "https://code.videolan.org/videolan/dav1d.git" && \
|
||||||
|
cd dav1d && \
|
||||||
|
mkdir build && \
|
||||||
|
cd build && \
|
||||||
|
meson setup -Denable_tools=false -Denable_tests=false --default-library=static .. --prefix "$LOCALDESTDIR" --libdir="$LOCALDESTDIR/lib" && \
|
||||||
|
ninja && \
|
||||||
|
ninja install
|
||||||
|
|
||||||
|
RUN \
|
||||||
|
cd ${BUILD} && \
|
||||||
|
git clone "https://github.com/webmproject/libvpx.git" && \
|
||||||
|
cd libvpx && \
|
||||||
|
./configure --prefix="$LOCALDESTDIR" --disable-shared --enable-static --disable-unit-tests --disable-docs --enable-postproc --enable-vp9-postproc --enable-runtime-cpu-detect && \
|
||||||
|
make -j $(nproc | awk '{print $1 / 2}') && \
|
||||||
|
make install
|
||||||
|
|
||||||
|
RUN \
|
||||||
|
cd ${BUILD} && \
|
||||||
|
git clone "https://bitbucket.org/multicoreware/x265_git.git" x265 && \
|
||||||
|
cd x265/build && \
|
||||||
|
rm -rf * && \
|
||||||
|
cmake ../source -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DCMAKE_CXX_FLAGS_RELEASE:STRING="-O3 -DNDEBUG" && \
|
||||||
|
make -j $(nproc | awk '{print $1 / 2}') && \
|
||||||
|
make install
|
||||||
|
|
||||||
|
RUN \
|
||||||
|
cd ${BUILD} && \
|
||||||
|
wget "https://ffmpeg.org/releases/ffmpeg-snapshot.tar.bz2" && \
|
||||||
|
tar xfvj ffmpeg-snapshot.tar.bz2 && \
|
||||||
|
rm -rf ffmpeg-snapshot.tar.bz2 && \
|
||||||
|
cd ffmpeg && \
|
||||||
|
./configure --prefix="$LOCALDESTDIR" --enable-pthreads --extra-libs=-lpthread \
|
||||||
|
--disable-debug --disable-shared --disable-doc --enable-gpl --enable-version3 --pkg-config-flags=--static \
|
||||||
|
--enable-nonfree --enable-runtime-cpudetect --enable-fontconfig \
|
||||||
|
--enable-openssl --enable-libass --enable-libfdk-aac --enable-libfreetype \
|
||||||
|
--enable-libfribidi --enable-libmp3lame --enable-libopus --enable-libvpx --enable-librist \
|
||||||
|
--enable-libsrt --enable-libx264 --enable-libx265 --enable-libzmq --enable-libsvtav1 --enable-libdav1d && \
|
||||||
|
make -j $(nproc | awk '{print $1 / 2}') && \
|
||||||
|
make install
|
||||||
|
|
||||||
|
RUN \
|
||||||
|
cd / && \
|
||||||
|
cp /opt/ffmpeg/bin/ff* /usr/local/bin/ && \
|
||||||
|
rm -rf $BUILD $LOCALDESTDIR && \
|
||||||
|
dnf -y remove autoconf automake cmake diffutils file gcc ninja-build nasm gcc-c++ git libtool make perl yasm meson \
|
||||||
|
x264-devel zlib-devel expat-devel fontconfig-devel libxml2-devel lame-devel libpng-devel numactl-devel \
|
||||||
|
fribidi-devel zeromq-devel freetype-devel opus-devel libass-devel openssl-devel && \
|
||||||
|
dnf autoremove -y && \
|
||||||
|
dnf clean all
|
||||||
|
|
||||||
|
FROM base
|
||||||
|
|
||||||
|
ARG FFPLAYOUT_VERSION=0.22.0
|
||||||
|
|
||||||
|
ENV LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib
|
||||||
|
|
||||||
|
COPY --from=build /usr/local/ /usr/local/
|
||||||
|
|
||||||
|
ADD ./overide.conf /etc/systemd/system/ffplayout.service.d/overide.conf
|
||||||
|
ADD ./overide.conf /etc/systemd/system/ffpapi.service.d/overide.conf
|
||||||
|
|
||||||
|
RUN \
|
||||||
|
dnf update -y \
|
||||||
|
dnf install -y epel-release && \
|
||||||
|
dnf install -y 'dnf-command(config-manager)' && \
|
||||||
|
dnf config-manager --set-enabled crb && \
|
||||||
|
dnf install -y --nogpgcheck https://mirrors.rpmfusion.org/free/el/rpmfusion-free-release-$(rpm -E %rhel).noarch.rpm && \
|
||||||
|
dnf install -y --nogpgcheck https://mirrors.rpmfusion.org/nonfree/el/rpmfusion-nonfree-release-$(rpm -E %rhel).noarch.rpm && \
|
||||||
|
dnf install -y wget dejavu-sans-fonts sudo x264-libs fontconfig lame libpng numactl fribidi zeromq freetype opus libass && \
|
||||||
|
wget -q -O /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm "https://github.com/ffplayout/ffplayout/releases/download/v${FFPLAYOUT_VERSION}/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm" && \
|
||||||
|
dnf install -y /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm && \
|
||||||
|
dnf clean all && \
|
||||||
|
rm /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm && \
|
||||||
|
mkdir -p /home/ffpu && chown -R ffpu: /home/ffpu && \
|
||||||
|
systemctl enable ffplayout && \
|
||||||
|
systemctl enable ffpapi && \
|
||||||
|
ffpapi -u admin -p admin -m contact@example.com
|
||||||
|
|
||||||
|
EXPOSE 8787
|
||||||
|
|
||||||
|
CMD ["/usr/sbin/init"]
|
@ -1,40 +0,0 @@
|
|||||||
FROM alpine:latest
|
|
||||||
|
|
||||||
ARG FFPLAYOUT_VERSION=0.24.0-rc3
|
|
||||||
ARG SHARED_STORAGE=false
|
|
||||||
|
|
||||||
ENV DB=/db
|
|
||||||
ENV SHARED_STORAGE=${SHARED_STORAGE}
|
|
||||||
|
|
||||||
COPY --from=ffmpeg-build /usr/local/bin/ffmpeg /usr/local/bin/ffmpeg
|
|
||||||
COPY --from=ffmpeg-build /usr/local/bin/ffprobe /usr/local/bin/ffprobe
|
|
||||||
COPY README.md ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.* /tmp/
|
|
||||||
|
|
||||||
COPY <<-EOT /run.sh
|
|
||||||
#!/bin/sh
|
|
||||||
|
|
||||||
if [ ! -f /db/ffplayout.db ]; then
|
|
||||||
ffplayout -i -u admin -p admin -m contact@example.com --storage "/tv-media" --playlists "/playlists" --public "/public" --logs "/logging" --mail-smtp "mail.example.org" --mail-user "admin@example.org" --mail-password "" --mail-starttls
|
|
||||||
fi
|
|
||||||
|
|
||||||
/usr/bin/ffplayout -l "0.0.0.0:8787"
|
|
||||||
EOT
|
|
||||||
|
|
||||||
RUN apk update && \
|
|
||||||
apk upgrade && \
|
|
||||||
apk add --no-cache sqlite font-dejavu && \
|
|
||||||
chmod +x /run.sh
|
|
||||||
|
|
||||||
RUN [[ -f "/tmp/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" ]] || \
|
|
||||||
wget -q "https://github.com/ffplayout/ffplayout/releases/download/v${FFPLAYOUT_VERSION}/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" -P /tmp/ && \
|
|
||||||
cd /tmp && \
|
|
||||||
tar xf "ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" && \
|
|
||||||
cp ffplayout /usr/bin/ && \
|
|
||||||
mkdir -p /usr/share/ffplayout/ && \
|
|
||||||
cp assets/dummy.vtt assets/logo.png assets/DejaVuSans.ttf assets/FONT_LICENSE.txt /usr/share/ffplayout/ && \
|
|
||||||
rm -rf /tmp/* && \
|
|
||||||
mkdir ${DB}
|
|
||||||
|
|
||||||
EXPOSE 8787
|
|
||||||
|
|
||||||
CMD ["/run.sh"]
|
|
113
docker/nvidia-centos7.Dockerfile
Normal file
@ -0,0 +1,113 @@
|
|||||||
|
FROM nvidia/cuda:12.0.1-cudnn8-runtime-centos7
|
||||||
|
|
||||||
|
ENV FFPLAYOUT_VERSION=0.22.0
|
||||||
|
ENV NVIDIA_VISIBLE_DEVICES all
|
||||||
|
ENV NVIDIA_DRIVER_CAPABILITIES compute,video,utility
|
||||||
|
|
||||||
|
ENV NVCODEC_VERSION 8.2.15.6
|
||||||
|
ENV FFMPEG_VERSION 5.1.2
|
||||||
|
ENV X264_VERSION=20191217-2245
|
||||||
|
ENV NASM_VERSION=2.14.02
|
||||||
|
ENV FDKAAC_VERSION=0.1.5
|
||||||
|
RUN yum install -y wget
|
||||||
|
|
||||||
|
RUN buildDeps="autoconf \
|
||||||
|
automake \
|
||||||
|
bzip2 \
|
||||||
|
cmake3 \
|
||||||
|
diffutils \
|
||||||
|
expat-devel \
|
||||||
|
file \
|
||||||
|
gcc \
|
||||||
|
gcc-c++ \
|
||||||
|
git \
|
||||||
|
gperf \
|
||||||
|
libtool \
|
||||||
|
make \
|
||||||
|
perl \
|
||||||
|
python3 \
|
||||||
|
openssl-devel \
|
||||||
|
tar \
|
||||||
|
yasm \
|
||||||
|
which \
|
||||||
|
zlib-devel" && \
|
||||||
|
echo "${SRC}/lib" > /etc/ld.so.conf.d/libc.conf && \
|
||||||
|
yum --enablerepo=extras install -y epel-release && \
|
||||||
|
yum --enablerepo=epel install -y ${buildDeps} && \
|
||||||
|
alternatives --install /usr/bin/cmake cmake /usr/bin/cmake3 0 && \
|
||||||
|
# Install the tools required to build nasm 2.14.02 \
|
||||||
|
nasmDeps="asciidoc \
|
||||||
|
perl-Font-TTF \
|
||||||
|
perl-Sort-Versions \
|
||||||
|
xmlto" && \
|
||||||
|
yum --enablerepo=epel install -y ${nasmDeps}
|
||||||
|
RUN curl -fsSLO https://www.nasm.us/pub/nasm/releasebuilds/$NASM_VERSION/nasm-$NASM_VERSION.tar.bz2 \
|
||||||
|
&& tar -xjf nasm-$NASM_VERSION.tar.bz2 \
|
||||||
|
&& cd nasm-$NASM_VERSION \
|
||||||
|
&& ./autogen.sh \
|
||||||
|
&& ./configure \
|
||||||
|
&& make -j$(nproc) \
|
||||||
|
&& make install
|
||||||
|
RUN \
|
||||||
|
DIR=/tmp/x264 && \
|
||||||
|
mkdir -p ${DIR} && \
|
||||||
|
cd ${DIR} && yum install -y wget && \
|
||||||
|
wget https://download.videolan.org/pub/videolan/x264/snapshots/x264-snapshot-20191217-2245.tar.bz2 && \
|
||||||
|
tar -xjf x264-snapshot-${X264_VERSION}.tar.bz2 && cd x264-snapshot-${X264_VERSION} && \
|
||||||
|
./configure --enable-shared --enable-pic --disable-cli && \
|
||||||
|
make -j $(nproc | awk '{print $1 / 2}') && \
|
||||||
|
make install
|
||||||
|
|
||||||
|
### fdk-aac https://github.com/mstorsjo/fdk-aac
|
||||||
|
RUN \
|
||||||
|
DIR=/tmp/fdk-aac && \
|
||||||
|
mkdir -p ${DIR} && \
|
||||||
|
cd ${DIR} && \
|
||||||
|
curl -sL https://github.com/mstorsjo/fdk-aac/archive/v${FDKAAC_VERSION}.tar.gz | \
|
||||||
|
tar -zx --strip-components=1 && \
|
||||||
|
autoreconf -fiv && \
|
||||||
|
./configure --enable-shared --datadir="${DIR}" && \
|
||||||
|
make -j $(nproc | awk '{print $1 / 2}') && \
|
||||||
|
make install && \
|
||||||
|
rm -rf ${DIR}
|
||||||
|
|
||||||
|
RUN git clone --depth 1 https://git.videolan.org/git/ffmpeg/nv-codec-headers \
|
||||||
|
&& cd nv-codec-headers \
|
||||||
|
&& make install
|
||||||
|
ENV PKG_CONFIG_PATH /usr/local/lib/pkgconfig
|
||||||
|
RUN curl -fsSLO https://ffmpeg.org/releases/ffmpeg-$FFMPEG_VERSION.tar.bz2 \
|
||||||
|
&& tar -xjf ffmpeg-$FFMPEG_VERSION.tar.bz2 \
|
||||||
|
&& cd ffmpeg-$FFMPEG_VERSION \
|
||||||
|
&& ./configure --enable-nvenc --enable-libx264 --enable-gpl --enable-libfdk_aac --enable-nonfree --enable-postproc --enable-shared --enable-version3 \
|
||||||
|
&& make -j$(nproc) \
|
||||||
|
&& make install
|
||||||
|
|
||||||
|
RUN yum -y install systemd vim pico; yum clean all; \
|
||||||
|
(cd /lib/systemd/system/sysinit.target.wants/; for i in *; do [ $i == systemd-tmpfiles-setup.service ] || rm -f $i; done); \
|
||||||
|
rm -f /lib/systemd/system/multi-user.target.wants/*;\
|
||||||
|
rm -f /etc/systemd/system/*.wants/*;\
|
||||||
|
rm -f /lib/systemd/system/local-fs.target.wants/*; \
|
||||||
|
rm -f /lib/systemd/system/sockets.target.wants/*udev*; \
|
||||||
|
rm -f /lib/systemd/system/sockets.target.wants/*initctl*; \
|
||||||
|
rm -f /lib/systemd/system/basic.target.wants/*;\
|
||||||
|
rm -f /lib/systemd/system/anaconda.target.wants/*;
|
||||||
|
RUN yum -y install net-tools openssh-server
|
||||||
|
RUN echo "PermitRootLogin yes" >> /etc/ssh/sshd_config
|
||||||
|
|
||||||
|
RUN yum update -y \
|
||||||
|
&& yum install -y dejavu-sans-fonts sudo wget \
|
||||||
|
&& wget -q -O /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm "https://github.com/ffplayout/ffplayout/releases/download/v${FFPLAYOUT_VERSION}/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm" \
|
||||||
|
&& yum install -y /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm \
|
||||||
|
&& yum clean all \
|
||||||
|
&& echo 'Docker!' | passwd --stdin root \
|
||||||
|
&& rm /tmp/ffplayout-${FFPLAYOUT_VERSION}-1.x86_64.rpm \
|
||||||
|
&& mkdir -p /home/ffpu && chown -R ffpu: /home/ffpu \
|
||||||
|
&& systemctl enable ffplayout \
|
||||||
|
&& systemctl enable ffpapi
|
||||||
|
|
||||||
|
EXPOSE 8787
|
||||||
|
RUN echo "/usr/local/lib" >> /etc/ld.so.conf.d/nvidia.conf
|
||||||
|
RUN echo "/usr/local/cuda/compat/" >> /etc/ld.so.conf.d/nvidia.conf
|
||||||
|
|
||||||
|
VOLUME [ "/tmp", "/run", "/run/lock", "/etc/ffplayout", "/usr/share/ffplayout" ,"/var/lib/ffplayout" ]
|
||||||
|
CMD ["/usr/sbin/init"]
|
@ -1,227 +0,0 @@
|
|||||||
FROM nvidia/cuda:12.5.0-runtime-rockylinux9
|
|
||||||
|
|
||||||
ARG FFPLAYOUT_VERSION=0.24.0-rc3
|
|
||||||
ARG SHARED_STORAGE=false
|
|
||||||
|
|
||||||
ENV DB=/db
|
|
||||||
ENV SHARED_STORAGE=${SHARED_STORAGE}
|
|
||||||
|
|
||||||
ENV EXTRA_CFLAGS=-march=generic \
|
|
||||||
LOCALBUILDDIR=/tmp/build \
|
|
||||||
LOCALDESTDIR=/tmp/local \
|
|
||||||
PKG_CONFIG="pkg-config --static" \
|
|
||||||
PKG_CONFIG_PATH="/usr/lib64/pkgconfig/:/tmp/local/lib/pkgconfig" \
|
|
||||||
CPPFLAGS="-I/tmp/local/include -O3 -fno-strict-overflow -fstack-protector-all -fPIC" \
|
|
||||||
CFLAGS="-I/tmp/local/include -O3 -fno-strict-overflow -fstack-protector-all -fPIC" \
|
|
||||||
CXXFLAGS="-I/tmp/local/include -O2 -fPIC" \
|
|
||||||
LDFLAGS="-L/tmp/local/lib -pipe -Wl,-z,relro,-z,now -static" \
|
|
||||||
CC=clang
|
|
||||||
|
|
||||||
RUN dnf clean all -y && \
|
|
||||||
dnf makecache --refresh && \
|
|
||||||
dnf install -y epel-release && \
|
|
||||||
dnf config-manager --set-enabled crb
|
|
||||||
|
|
||||||
RUN dnf install -y which sqlite libstdc++-static libtool autoconf clang \
|
|
||||||
cmake ninja-build cargo ragel meson git pkgconfig bzip2 \
|
|
||||||
python3-devel gperf perl glibc-static binutils-devel \
|
|
||||||
nasm rsync wget
|
|
||||||
|
|
||||||
WORKDIR /tmp
|
|
||||||
|
|
||||||
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "zlib-1.3.1.tar.gz" "https://zlib.net/zlib-1.3.1.tar.gz" && \
|
|
||||||
tar xf "zlib-1.3.1.tar.gz" && \
|
|
||||||
cd "zlib-1.3.1" && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --static && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "openssl-1.1.1u.tar.gz" "https://www.openssl.org/source/openssl-1.1.1u.tar.gz" && \
|
|
||||||
tar xf "openssl-1.1.1u.tar.gz" && \
|
|
||||||
cd "openssl-1.1.1u" && \
|
|
||||||
./Configure --prefix=$LOCALDESTDIR --openssldir=$LOCALDESTDIR linux-x86_64 --libdir="$LOCALDESTDIR/lib" no-shared enable-camellia enable-idea enable-mdc2 enable-rfc3779 -static-libstdc++ -static-libgcc && \
|
|
||||||
make depend all && \
|
|
||||||
make install_sw
|
|
||||||
|
|
||||||
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "bzip2-1.0.8.tar.gz" "https://sourceware.org/pub/bzip2/bzip2-1.0.8.tar.gz" && \
|
|
||||||
tar xf "bzip2-1.0.8.tar.gz" && \
|
|
||||||
cd "bzip2-1.0.8" && \
|
|
||||||
make install PREFIX="$LOCALDESTDIR"
|
|
||||||
|
|
||||||
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "libpng-1.6.40.tar.gz" "http://prdownloads.sourceforge.net/libpng/libpng-1.6.40.tar.gz" && \
|
|
||||||
tar xf "libpng-1.6.40.tar.gz" && \
|
|
||||||
cd "libpng-1.6.40" && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --disable-shared && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone --depth 1 "https://github.com/fribidi/fribidi.git" && cd fribidi && \
|
|
||||||
./autogen.sh && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
|
|
||||||
make -j $(nproc) 2>/dev/null || true && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "expat-2.5.0.tar.bz2" "https://github.com/libexpat/libexpat/releases/download/R_2_5_0/expat-2.5.0.tar.bz2" && \
|
|
||||||
tar xf "expat-2.5.0.tar.bz2" && \
|
|
||||||
cd "expat-2.5.0" && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --enable-shared=no --without-docbook && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "freetype-2.13.1.tar.gz" "https://sourceforge.net/projects/freetype/files/freetype2/2.13.1/freetype-2.13.1.tar.gz" && \
|
|
||||||
tar xf "freetype-2.13.1.tar.gz" && \
|
|
||||||
cd "freetype-2.13.1" && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --disable-shared --with-harfbuzz=no && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "fontconfig-2.14.2.tar.gz" "https://www.freedesktop.org/software/fontconfig/release/fontconfig-2.14.2.tar.gz" && \
|
|
||||||
tar xf "fontconfig-2.14.2.tar.gz" && \
|
|
||||||
cd "fontconfig-2.14.2" && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install && \
|
|
||||||
cp fontconfig.pc "$LOCALDESTDIR/lib/pkgconfig/"
|
|
||||||
|
|
||||||
RUN git clone --depth 1 "https://github.com/harfbuzz/harfbuzz.git" && cd harfbuzz && \
|
|
||||||
mkdir build && cd build && \
|
|
||||||
meson setup -Denable_tools=false --default-library=static .. --prefix "$LOCALDESTDIR" --libdir="$LOCALDESTDIR/lib" && \
|
|
||||||
ninja && \
|
|
||||||
ninja install
|
|
||||||
|
|
||||||
RUN git clone --depth 1 "https://github.com/zeromq/libzmq.git" && cd libzmq && \
|
|
||||||
./autogen.sh && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --enable-static --disable-shared && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone --depth 1 "https://github.com/libass/libass.git" && cd libass && \
|
|
||||||
./autogen.sh && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --enable-shared=no --disable-harfbuzz && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone --depth 1 "https://github.com/mstorsjo/fdk-aac" && cd fdk-aac && \
|
|
||||||
./autogen.sh && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --enable-shared=no && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "lame-3.100.tar.gz" "https://downloads.sourceforge.net/project/lame/lame/3.100/lame-3.100.tar.gz" && \
|
|
||||||
tar xf "lame-3.100.tar.gz" && \
|
|
||||||
cd "lame-3.100" && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --enable-expopt=full --enable-shared=no && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN curl --retry 20 --retry-max-time 5 -L -k -f -w "%{response_code}" -o "opus-1.4.tar.gz" "https://ftp.osuosl.org/pub/xiph/releases/opus/opus-1.4.tar.gz" && \
|
|
||||||
tar xf "opus-1.4.tar.gz" && \
|
|
||||||
cd "opus-1.4" && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --enable-shared=no --enable-static --disable-doc && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone --depth 1 "https://github.com/Haivision/srt.git" && cd srt && \
|
|
||||||
mkdir build && \
|
|
||||||
cd build && \
|
|
||||||
cmake .. -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DOPENSSL_USE_STATIC_LIBS=ON -DUSE_STATIC_LIBSTDCXX:BOOLEAN=ON -DENABLE_CXX11:BOOLEAN=ON -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" -DENABLE_APPS=0 -DENABLE_EXAMPLES=0 && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone "https://github.com/webmproject/libvpx.git" && cd libvpx && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --as=nasm --disable-shared --enable-static --disable-unit-tests --disable-docs --enable-postproc --enable-vp9-postproc --enable-runtime-cpu-detect && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone "https://code.videolan.org/videolan/x264" && cd x264 && \
|
|
||||||
./configure --prefix="$LOCALDESTDIR" --enable-static && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone "https://bitbucket.org/multicoreware/x265_git.git" && cd x265_git/build && \
|
|
||||||
cmake ../source -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DENABLE_SHARED:BOOLEAN=OFF -DCMAKE_CXX_FLAGS_RELEASE:STRING="-O3 -DNDEBUG $CXXFLAGS" && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone --depth 1 "https://gitlab.com/AOMediaCodec/SVT-AV1.git" && cd SVT-AV1/Build && \
|
|
||||||
cmake .. -G"Unix Makefiles" -DCMAKE_INSTALL_PREFIX="$LOCALDESTDIR" -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_BINDIR="bin" -DCMAKE_INSTALL_LIBDIR="lib" -DCMAKE_INSTALL_INCLUDEDIR="include" && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN git clone --depth 1 "https://code.videolan.org/videolan/dav1d.git" && cd dav1d && \
|
|
||||||
mkdir build && cd build && \
|
|
||||||
meson setup -Denable_tools=false -Denable_tests=false --default-library=static .. --prefix "$LOCALDESTDIR" --libdir="$LOCALDESTDIR/lib" && \
|
|
||||||
ninja && \
|
|
||||||
ninja install
|
|
||||||
|
|
||||||
RUN git clone --depth 1 https://git.videolan.org/git/ffmpeg/nv-codec-headers && cd nv-codec-headers && \
|
|
||||||
make install PREFIX="$LOCALDESTDIR"
|
|
||||||
|
|
||||||
RUN git clone --depth 1 https://git.ffmpeg.org/ffmpeg.git && cd ffmpeg && \
|
|
||||||
./configure \
|
|
||||||
--pkg-config-flags=--static \
|
|
||||||
--extra-cflags="-fopenmp -DZMG_STATIC" \
|
|
||||||
--extra-ldflags="-fopenmp -Wl,--copy-dt-needed-entries -Wl,--allow-multiple-definition" \
|
|
||||||
--enable-runtime-cpudetect \
|
|
||||||
--prefix=/usr/local \
|
|
||||||
--disable-debug \
|
|
||||||
--disable-doc \
|
|
||||||
--disable-ffplay \
|
|
||||||
--disable-shared \
|
|
||||||
--enable-gpl \
|
|
||||||
--enable-version3 \
|
|
||||||
--enable-nonfree \
|
|
||||||
--enable-small \
|
|
||||||
--enable-static \
|
|
||||||
--enable-libass \
|
|
||||||
--enable-fontconfig \
|
|
||||||
--enable-libfdk-aac \
|
|
||||||
--enable-libfribidi \
|
|
||||||
--enable-libfreetype \
|
|
||||||
--enable-libharfbuzz \
|
|
||||||
--enable-libmp3lame \
|
|
||||||
--enable-libopus \
|
|
||||||
--enable-libsrt \
|
|
||||||
--enable-libvpx \
|
|
||||||
--enable-libx264 \
|
|
||||||
--enable-libx265 \
|
|
||||||
--enable-libzmq \
|
|
||||||
--enable-nonfree \
|
|
||||||
--enable-openssl \
|
|
||||||
--enable-libsvtav1 \
|
|
||||||
--enable-libdav1d \
|
|
||||||
--enable-nvenc && \
|
|
||||||
make -j $(nproc) && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
RUN strip /usr/local/bin/ffmpeg /usr/local/bin/ffprobe
|
|
||||||
|
|
||||||
WORKDIR /
|
|
||||||
|
|
||||||
COPY README.md ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.* /tmp/
|
|
||||||
|
|
||||||
COPY <<-EOT /run.sh
|
|
||||||
#!/bin/sh
|
|
||||||
|
|
||||||
if [ ! -f /db/ffplayout.db ]; then
|
|
||||||
ffplayout -i -u admin -p admin -m contact@example.com --storage "/tv-media" --playlists "/playlists" --public "/public" --logs "/logging" --mail-smtp "mail.example.org" --mail-user "admin@example.org" --mail-password "" --mail-starttls
|
|
||||||
fi
|
|
||||||
|
|
||||||
/usr/bin/ffplayout -l "0.0.0.0:8787"
|
|
||||||
EOT
|
|
||||||
|
|
||||||
RUN chmod +x /run.sh
|
|
||||||
|
|
||||||
RUN [[ -f "/tmp/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" ]] || \
|
|
||||||
wget -q "https://github.com/ffplayout/ffplayout/releases/download/v${FFPLAYOUT_VERSION}/ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" -P /tmp/ && \
|
|
||||||
cd /tmp && \
|
|
||||||
tar xf "ffplayout-v${FFPLAYOUT_VERSION}_x86_64-unknown-linux-musl.tar.gz" && \
|
|
||||||
cp ffplayout /usr/bin/ && \
|
|
||||||
mmkdir -p /usr/share/ffplayout/ && \
|
|
||||||
cp assets/dummy.vtt assets/logo.png assets/DejaVuSans.ttf assets/FONT_LICENSE.txt /usr/share/ffplayout/ && \
|
|
||||||
rm -rf /tmp/* && \
|
|
||||||
mkdir ${DB}
|
|
||||||
|
|
||||||
EXPOSE 8787
|
|
||||||
|
|
||||||
CMD ["/run.sh"]
|
|
@ -1,6 +1,6 @@
|
|||||||
## Advanced settings
|
## Advanced settings
|
||||||
|
|
||||||
With **advanced settings** you can control all ffmpeg inputs/decoder/output and filters.
|
Within **/etc/ffplayout/advanced.yml** you can control all ffmpeg inputs/decoder output and filters.
|
||||||
|
|
||||||
> **_Note:_** Changing these settings is for advanced users only! There will be no support or guarantee that it will work and be stable after changing them!
|
> **_Note:_** Changing these settings is for advanced users only! There will be no support or guarantee that it will work and be stable after changing them!
|
||||||
|
|
||||||
|
68
docs/api.md
@ -1,9 +1,9 @@
|
|||||||
### Possible endpoints
|
## Possible endpoints
|
||||||
|
|
||||||
Run the API thru the systemd service, or like:
|
Run the API thru the systemd service, or like:
|
||||||
|
|
||||||
```BASH
|
```BASH
|
||||||
ffplayout -l 127.0.0.1:8787
|
ffpapi -l 127.0.0.1:8787
|
||||||
```
|
```
|
||||||
|
|
||||||
For all endpoints an (Bearer) authentication is required.\
|
For all endpoints an (Bearer) authentication is required.\
|
||||||
@ -72,7 +72,7 @@ curl -X GET 'http://127.0.0.1:8787/api/user/2' -H 'Content-Type: application/jso
|
|||||||
-H 'Authorization: Bearer <TOKEN>'
|
-H 'Authorization: Bearer <TOKEN>'
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Settings
|
#### ffpapi Settings
|
||||||
|
|
||||||
**Get Settings from Channel**
|
**Get Settings from Channel**
|
||||||
|
|
||||||
@ -87,7 +87,9 @@ curl -X GET http://127.0.0.1:8787/api/channel/1 -H "Authorization: Bearer <TOKEN
|
|||||||
"id": 1,
|
"id": 1,
|
||||||
"name": "Channel 1",
|
"name": "Channel 1",
|
||||||
"preview_url": "http://localhost/live/preview.m3u8",
|
"preview_url": "http://localhost/live/preview.m3u8",
|
||||||
|
"config_path": "/etc/ffplayout/ffplayout.yml",
|
||||||
"extra_extensions": "jpg,jpeg,png",
|
"extra_extensions": "jpg,jpeg,png",
|
||||||
|
"service": "ffplayout.service",
|
||||||
"utc_offset": "+120"
|
"utc_offset": "+120"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
@ -102,7 +104,7 @@ curl -X GET http://127.0.0.1:8787/api/channels -H "Authorization: Bearer <TOKEN>
|
|||||||
|
|
||||||
```BASH
|
```BASH
|
||||||
curl -X PATCH http://127.0.0.1:8787/api/channel/1 -H "Content-Type: application/json" \
|
curl -X PATCH http://127.0.0.1:8787/api/channel/1 -H "Content-Type: application/json" \
|
||||||
-d '{ "id": 1, "name": "Channel 1", "preview_url": "http://localhost/live/stream.m3u8", "extra_extensions": "jpg,jpeg,png"}' \
|
-d '{ "id": 1, "name": "Channel 1", "preview_url": "http://localhost/live/stream.m3u8", "config_path": "/etc/ffplayout/ffplayout.yml", "extra_extensions": "jpg,jpeg,png"}' \
|
||||||
-H "Authorization: Bearer <TOKEN>"
|
-H "Authorization: Bearer <TOKEN>"
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -110,7 +112,7 @@ curl -X PATCH http://127.0.0.1:8787/api/channel/1 -H "Content-Type: application/
|
|||||||
|
|
||||||
```BASH
|
```BASH
|
||||||
curl -X POST http://127.0.0.1:8787/api/channel/ -H "Content-Type: application/json" \
|
curl -X POST http://127.0.0.1:8787/api/channel/ -H "Content-Type: application/json" \
|
||||||
-d '{ "name": "Channel 2", "preview_url": "http://localhost/live/channel2.m3u8", "extra_extensions": "jpg,jpeg,png" }' \
|
-d '{ "name": "Channel 2", "preview_url": "http://localhost/live/channel2.m3u8", "config_path": "/etc/ffplayout/channel2.yml", "extra_extensions": "jpg,jpeg,png", "service": "ffplayout@channel2.service" }' \
|
||||||
-H "Authorization: Bearer <TOKEN>"
|
-H "Authorization: Bearer <TOKEN>"
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -122,28 +124,13 @@ curl -X DELETE http://127.0.0.1:8787/api/channel/2 -H "Authorization: Bearer <TO
|
|||||||
|
|
||||||
#### ffplayout Config
|
#### ffplayout Config
|
||||||
|
|
||||||
**Get Advanced Config**
|
|
||||||
|
|
||||||
```BASH
|
|
||||||
curl -X GET http://127.0.0.1:8787/api/playout/advanced/1 -H 'Authorization: Bearer <TOKEN>'
|
|
||||||
```
|
|
||||||
|
|
||||||
Response is a JSON object
|
|
||||||
|
|
||||||
**Update Advanced Config**
|
|
||||||
|
|
||||||
```BASH
|
|
||||||
curl -X PUT http://127.0.0.1:8787/api/playout/advanced/1 -H "Content-Type: application/json" \
|
|
||||||
-d { <CONFIG DATA> } -H 'Authorization: Bearer <TOKEN>'
|
|
||||||
```
|
|
||||||
|
|
||||||
**Get Config**
|
**Get Config**
|
||||||
|
|
||||||
```BASH
|
```BASH
|
||||||
curl -X GET http://127.0.0.1:8787/api/playout/config/1 -H 'Authorization: Bearer <TOKEN>'
|
curl -X GET http://127.0.0.1:8787/api/playout/config/1 -H 'Authorization: Bearer <TOKEN>'
|
||||||
```
|
```
|
||||||
|
|
||||||
Response is a JSON object
|
Response is a JSON object from the ffplayout.yml
|
||||||
|
|
||||||
**Update Config**
|
**Update Config**
|
||||||
|
|
||||||
@ -174,7 +161,7 @@ curl -X PUT http://127.0.0.1:8787/api/presets/1 -H 'Content-Type: application/js
|
|||||||
**Add new Preset**
|
**Add new Preset**
|
||||||
|
|
||||||
```BASH
|
```BASH
|
||||||
curl -X POST http://127.0.0.1:8787/api/presets/1/ -H 'Content-Type: application/json' \
|
curl -X POST http://127.0.0.1:8787/api/presets/ -H 'Content-Type: application/json' \
|
||||||
-d '{ "name": "<PRESET NAME>", "text": "TEXT>", "x": "<X>", "y": "<Y>", "fontsize": 24, "line_spacing": 4, "fontcolor": "#ffffff", "box": 1, "boxcolor": "#000000", "boxborderw": 4, "alpha": 1.0, "channel_id": 1 }' \
|
-d '{ "name": "<PRESET NAME>", "text": "TEXT>", "x": "<X>", "y": "<Y>", "fontsize": 24, "line_spacing": 4, "fontcolor": "#ffffff", "box": 1, "boxcolor": "#000000", "boxborderw": 4, "alpha": 1.0, "channel_id": 1 }' \
|
||||||
-H 'Authorization: Bearer <TOKEN>'
|
-H 'Authorization: Bearer <TOKEN>'
|
||||||
```
|
```
|
||||||
@ -223,19 +210,38 @@ curl -X GET http://127.0.0.1:8787/api/control/1/media/current
|
|||||||
**Response:**
|
**Response:**
|
||||||
|
|
||||||
```JSON
|
```JSON
|
||||||
{
|
{
|
||||||
"media": {
|
"jsonrpc": "2.0",
|
||||||
|
"result": {
|
||||||
|
"current_media": {
|
||||||
"category": "",
|
"category": "",
|
||||||
"duration": 154.2,
|
"duration": 154.2,
|
||||||
"out": 154.2,
|
"out": 154.2,
|
||||||
"in": 0.0,
|
"seek": 0.0,
|
||||||
"source": "/opt/tv-media/clip.mp4"
|
"source": "/opt/tv-media/clip.mp4"
|
||||||
},
|
},
|
||||||
"index": 39,
|
"index": 39,
|
||||||
"ingest": false,
|
"play_mode": "playlist",
|
||||||
"mode": "playlist",
|
"played_sec": 67.80771999300123,
|
||||||
"played": 67.808
|
"remaining_sec": 86.39228000699876,
|
||||||
}
|
"start_sec": 24713.631999999998,
|
||||||
|
"start_time": "06:51:53.631"
|
||||||
|
},
|
||||||
|
"id": 1
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Get next Clip**
|
||||||
|
|
||||||
|
```BASH
|
||||||
|
curl -X GET http://127.0.0.1:8787/api/control/1/media/next -H 'Authorization: Bearer <TOKEN>'
|
||||||
|
```
|
||||||
|
|
||||||
|
**Get last Clip**
|
||||||
|
|
||||||
|
```BASH
|
||||||
|
curl -X GET http://127.0.0.1:8787/api/control/1/media/last
|
||||||
|
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
|
||||||
```
|
```
|
||||||
|
|
||||||
#### ffplayout Process Control
|
#### ffplayout Process Control
|
||||||
@ -297,10 +303,10 @@ curl -X DELETE http://127.0.0.1:8787/api/playlist/1/2022-06-20
|
|||||||
|
|
||||||
### Log file
|
### Log file
|
||||||
|
|
||||||
**Read Log File**
|
**Read Log Life**
|
||||||
|
|
||||||
```BASH
|
```BASH
|
||||||
curl -X GET http://127.0.0.1:8787/api/log/1?date=2022-06-20
|
curl -X GET http://127.0.0.1:8787/api/log/1
|
||||||
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
|
-H 'Content-Type: application/json' -H 'Authorization: Bearer <TOKEN>'
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -1,23 +0,0 @@
|
|||||||
## Closed Captions
|
|
||||||
|
|
||||||
#### Note:
|
|
||||||
**This is only an _experimental feature_. Please be aware that bugs and unexpected behavior may occur. To utilize this feature, a version after 7.1 of FFmpeg is required. Importantly, there is currently no official support for this functionality.**
|
|
||||||
|
|
||||||
### Usage
|
|
||||||
**ffplayout** can handle closed captions in WebVTT format for HLS streaming.
|
|
||||||
|
|
||||||
The captions can be embedded in the file, such as in a [Matroska](https://www.matroska.org/technical/subtitles.html) file, or they can be a separate *.vtt file that shares the same filename as the video file. In either case, the processing option **vtt_enable** must be enabled, and the path to the **vtt_dummy** file must exist.
|
|
||||||
|
|
||||||
To encode the closed captions, the **hls** mode needs to be enabled, and specific output parameters must be provided. Here’s an example:
|
|
||||||
|
|
||||||
```
|
|
||||||
-c:v libx264 -crf 23 -x264-params keyint=50:min-keyint=25:scenecut=-1 \
|
|
||||||
-maxrate 1300k -bufsize 2600k -preset faster -tune zerolatency \
|
|
||||||
-profile:v Main -level 3.1 -c:a aac -ar 44100 -b:a 128k -flags +cgop \
|
|
||||||
-muxpreload 0 -muxdelay 0 -f hls -hls_time 6 -hls_list_size 600 \
|
|
||||||
-hls_flags append_list+delete_segments+omit_endlist \
|
|
||||||
-var_stream_map v:0,a:0,s:0,sgroup:subs,sname:English,language:en-US,default:YES \
|
|
||||||
-master_pl_name master.m3u8 \
|
|
||||||
-hls_segment_filename \
|
|
||||||
live/stream-%d.ts live/stream.m3u8
|
|
||||||
```
|
|
@ -1,10 +1,10 @@
|
|||||||
## Custom filter
|
## Custom filter
|
||||||
|
|
||||||
ffplayout allows the definition of a custom filter string. For this, the parameter **custom_filter** is available in the playout configuration under **processing**. The playlist can also contain a **custom_filter** parameter for each clip, with the same usage.
|
ffplayout allows it to define a custom filter string. For that is the parameter **custom_filter** in the **ffplayout.yml** config file under **processing**. The playlist can also contain a **custom_filter** parameter for every clip, with the same usage.
|
||||||
|
|
||||||
The filter outputs should end with `[c_v_out]` for video filters and `[c_a_out]` for audio filters. The filters will be applied to every clip and after the filters that unify the clips.
|
The filter outputs should end with `[c_v_out]` for video filter, and `[c_a_out]` for audio filter. The filters will be apply on every clip and after the filters which unify the clips.
|
||||||
|
|
||||||
It is possible to apply only video filters, only audio filters, or both. For a better understanding, here are some examples:
|
It is possible to apply only video or audio filters, or both. For a better understanding here some examples:
|
||||||
|
|
||||||
#### Apply Gaussian blur and volume filter:
|
#### Apply Gaussian blur and volume filter:
|
||||||
|
|
||||||
@ -51,7 +51,7 @@ The **custom filter** from **config -> processing** and from **playlist** got ap
|
|||||||
|
|
||||||
```mermaid
|
```mermaid
|
||||||
flowchart LR
|
flowchart LR
|
||||||
|
|
||||||
subgraph fileloop["file loop"]
|
subgraph fileloop["file loop"]
|
||||||
direction LR
|
direction LR
|
||||||
Input --> dec
|
Input --> dec
|
||||||
@ -84,6 +84,7 @@ custom_filter: "[v_in];movie=image_input.png:s=v,loop=loop=250.0:size=1:start=0,
|
|||||||
And here are the explanation for each filter:
|
And here are the explanation for each filter:
|
||||||
|
|
||||||
```PYTHON
|
```PYTHON
|
||||||
|
|
||||||
# get input from video
|
# get input from video
|
||||||
[v_in];
|
[v_in];
|
||||||
|
|
||||||
|
@ -68,47 +68,3 @@ cargo deb --no-build --target=aarch64-unknown-linux-gnu --variant=arm64 -p ffpla
|
|||||||
# for rhel based systems:
|
# for rhel based systems:
|
||||||
cargo generate-rpm --target=x86_64-unknown-linux-musl
|
cargo generate-rpm --target=x86_64-unknown-linux-musl
|
||||||
```
|
```
|
||||||
|
|
||||||
## Generate types for Frontend
|
|
||||||
The frontend uses TypeScript, to generate types for the rust structs run: `cargo test`.
|
|
||||||
|
|
||||||
The generated types are then in [types folder](/frontend/types).
|
|
||||||
|
|
||||||
## Setup Frontend
|
|
||||||
|
|
||||||
Make sure to install the dependencies:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# yarn
|
|
||||||
yarn install
|
|
||||||
|
|
||||||
# npm
|
|
||||||
npm install
|
|
||||||
|
|
||||||
# pnpm
|
|
||||||
pnpm install --shamefully-hoist
|
|
||||||
```
|
|
||||||
|
|
||||||
## Development Server
|
|
||||||
|
|
||||||
Start the development server on http://localhost:3000
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npm run dev
|
|
||||||
```
|
|
||||||
|
|
||||||
## Production
|
|
||||||
|
|
||||||
Build the application for production:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npm run build
|
|
||||||
```
|
|
||||||
|
|
||||||
Locally preview production build:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npm run preview
|
|
||||||
```
|
|
||||||
|
|
||||||
Check out the [deployment documentation](https://nuxt.com/docs/getting-started/deployment) for more information.
|
|
||||||
|
@ -1,9 +1,10 @@
|
|||||||
### Folder Mode
|
### Folder Mode
|
||||||
|
|
||||||
ffplayout can play files from a folder; no playlists are required for this mode. This folder is monitored for changes, and when new files are added or deleted, they are registered and updated accordingly.
|
ffplayout can play files from a folder, no playlists are required for this mode. This folder is monitored for changes, and when new files are added or deleted, this is registered and updated accordingly.
|
||||||
|
|
||||||
You just need to set `mode: folder` in the config under `processing:`, and under `storage:`, you have to specify the correct folder and the file extensions you want to scan for.
|
You just have to set `mode: folder` in the config under `processing:` and under `storage:` you have to enter the correct folder and the file extensions you want to scan for.
|
||||||
|
|
||||||
Additionally, there is a **shuffle** mode. If this is activated, the files will be played randomly.
|
Additionally there is a **shuffle** mode, if this is activated, the files will be played randomly.
|
||||||
|
|
||||||
If shuffle mode is off, the clips will be played in sorted order.
|
If shuffle mode is off, the clips will be played in sorted order.
|
||||||
|
|
||||||
|
Before Width: | Height: | Size: 60 KiB |
Before Width: | Height: | Size: 68 KiB |
Before Width: | Height: | Size: 306 KiB |
Before Width: | Height: | Size: 33 KiB |
Before Width: | Height: | Size: 180 KiB |
Before Width: | Height: | Size: 47 KiB |
Before Width: | Height: | Size: 173 KiB |
@ -1,12 +1,12 @@
|
|||||||
In some situations, application closure may occur in conjunction with Live Ingest.
|
In some situations, application closure may occur in conjunction with Live Ingest.
|
||||||
|
|
||||||
Here is an example in combination with SRS:
|
Here is an example, in combination with SRS:
|
||||||
|
|
||||||
When a live stream is sent, it is forwarded to ffplayout, which then switches the TV program to the live stream.
|
When a live stream is sent, it is forwarded to ffplayout, which then switches the TV program to the live stream.
|
||||||
|
|
||||||
Problems can occur if the internet connection for the live stream is not stable. In such cases, timeouts can occur, SRS breaks the connection to the playout, and the entire ffplayout process has to be restarted. The default timeout is 5000ms, or 5 seconds.
|
Problems now occur if the internet connection for the live stream is not stable. Then timeouts can occur, SRS breaks the connection to the playout and the whole ffplayout process has to be restarted. The default timeout is 5000ms, i.e. 5 seconds.
|
||||||
|
|
||||||
The timeout can be changed in SRS in the respective vhosts with:
|
The timeout can be changed in SRS in the respective vhosts with:
|
||||||
|
|
||||||
```NGINX
|
```NGINX
|
||||||
publish {
|
publish {
|
||||||
|
@ -1,29 +1,38 @@
|
|||||||
### Install ffplayout
|
### Install ffplayout
|
||||||
|
|
||||||
**Note:** This is the official and supported way.
|
ffplayout provides ***.deb** and ***.rpm** packages, which makes it more easy to install and use, but there is still some steps to do.
|
||||||
|
|
||||||
ffplayout provides ***.deb** and ***.rpm** packages, which makes it easier to install and use, but there are still some steps to follow.
|
1. download the latest ffplayout from [release](https://github.com/ffplayout/ffplayout/releases/latest) page and place the package in the **/tmp** folder.
|
||||||
|
2. install it with `apt install /tmp/ffplayout_<VERSION>_amd64.deb`
|
||||||
|
3. install ffmpeg/ffprobe, or compile and copy it to **/usr/local/bin/**
|
||||||
|
4. activate systemd services:
|
||||||
|
- `systemctl enable ffplayout`
|
||||||
|
- `systemctl enable --now ffpapi`
|
||||||
|
5. add admin user to ffpapi:
|
||||||
|
- `ffpapi -a`
|
||||||
|
6. use a reverse proxy for SSL; the port is **8787**.
|
||||||
|
7. login with your browser, address without proxy would be: **http://[IP ADDRESS]:8787**
|
||||||
|
|
||||||
1. Download the latest ffplayout from the [release](https://github.com/ffplayout/ffplayout/releases/latest) page and place the package in the **/tmp** folder
|
Default location for playlists and media files are: **/var/lib/ffplayout/**.
|
||||||
2. Install it with `apt install /tmp/ffplayout_<VERSION>_amd64.deb`
|
|
||||||
3. Install ffmpeg/ffprobe, or compile and copy them to **/usr/local/bin/**
|
When you don't need the frontend and API, skip enable the systemd service **ffpapi**.
|
||||||
4. Initialize the defaults and add a global admin user: `sudo -u ffpu ffplayout -i`
|
|
||||||
5. Use a reverse proxy for SSL; the port is **8787**
|
When playlists are created and the ffplayout output is configured, you can start the process: `systemctl start ffplayout`, or click start in frontend.
|
||||||
6. Log in with your browser. The address without a proxy would be: **http://[IP ADDRESS]:8787**
|
|
||||||
|
If you want to configure ffplayout over terminal, you can edit **/etc/ffplayout/ffplayout.yml**.
|
||||||
|
|
||||||
### Manual Install
|
### Manual Install
|
||||||
|
-----
|
||||||
|
|
||||||
**Note:** This is for advanced users only.
|
- install ffmpeg/ffprobe, or compile and copy it to **/usr/local/bin/**
|
||||||
|
- download the latest archive from [release](https://github.com/ffplayout/ffplayout/releases/latest) page
|
||||||
- Install ffmpeg/ffprobe, or compile and copy them to **/usr/local/bin/**
|
- copy the ffplayout and ffpapi binary to `/usr/bin/`
|
||||||
- Download the latest archive from the [release](https://github.com/ffplayout/ffplayout/releases/latest) page
|
- copy **assets/ffplayout.yml** to `/etc/ffplayout`
|
||||||
- Copy the ffplayout binary to `/usr/bin/`
|
- create folder `/var/log/ffplayout`
|
||||||
- Copy **assets/ffplayout.yml** to `/etc/ffplayout`
|
- create system user **ffpu**
|
||||||
- Create the folder `/var/log/ffplayout`
|
- give ownership from `/etc/ffplayout` and `/var/log/ffplayout` to **ffpu**
|
||||||
- Create the system user **ffpu**
|
- copy **assets/ffpapi.service**, **assets/ffplayout.service** and **assets/ffplayout@.service** to `/etc/systemd/system`
|
||||||
- Give ownership of `/etc/ffplayout` and `/var/log/ffplayout` to **ffpu**
|
- copy **assets/11-ffplayout** to `/etc/sudoers.d/`
|
||||||
- Copy **assets/ffplayout.service** to `/etc/systemd/system`
|
- copy **assets/ffpapi.1.gz** and **assets/ffplayout.1.gz** to `/usr/share/man/man1/`
|
||||||
- Copy **assets/ffplayout.1.gz** to `/usr/share/man/man1/`
|
- copy **public** folder to `/usr/share/ffplayout/`
|
||||||
- Copy the **public** folder to `/usr/share/ffplayout/`
|
- activate service and run it: `systemctl enable --now ffpapi ffplayout`
|
||||||
- Activate the service and run it: `systemctl enable --now ffplayout`
|
|
||||||
- Initialize the defaults and add a global admin user: `sudo -u ffpu ffplayout -i`
|
|
||||||
|
@ -1,8 +1,8 @@
|
|||||||
### Live Ingest
|
### Live Ingest
|
||||||
|
|
||||||
With live ingest, you have the possibility to switch from playlist or folder mode to a live stream.
|
With live ingest you have the possibility to switch from playlist, or folder mode to a live stream.
|
||||||
|
|
||||||
It works by creating an ffmpeg instance in _listen_ (_server_) mode. For example, when streaming over RTMP, you can set the ingest input parameters to:
|
It works in a way, that it create a ffmpeg instance in _listen_ (_server_) mode. For example when you stream over RTMP to it, you can set the ingest input parameters to:
|
||||||
|
|
||||||
```
|
```
|
||||||
-f live_flv -listen 1 -i rtmp://0.0.0.0:1936/live/my-secrete-streaming-key
|
-f live_flv -listen 1 -i rtmp://0.0.0.0:1936/live/my-secrete-streaming-key
|
||||||
@ -14,14 +14,14 @@ For SRT you could use:
|
|||||||
-f mpegts -i 'srt://0.0.0.0:40077?mode=listener&passphrase=12345abcde'
|
-f mpegts -i 'srt://0.0.0.0:40077?mode=listener&passphrase=12345abcde'
|
||||||
```
|
```
|
||||||
|
|
||||||
Keep in mind that the ingest mode **can't** pull from a server; it can only act as its own server and listen for incoming streams.
|
Have in mind, that the ingest mode **can't** pull from a server, it only can act as its own server and listen for income.
|
||||||
|
|
||||||
When it detects an incoming stream, it will stop the currently playing content and switch to the live source. The output will not be interrupted, so you will have a continuous output stream.
|
When it notice a incoming stream, it will stop the current playing and continue the live source. The output will not interrupt, so you have a continuously output stream.
|
||||||
|
|
||||||
In rare cases, it may happen that, for a short moment after switching, the image freezes, but then it will continue. Also, a brief frame flicker might occur.
|
In rare cases it can happen, that for a short moment after switching the image freezes, but then it will continue. Also a short frame flickering can happen.
|
||||||
|
|
||||||
You should know that **ffmpeg, in its current version, has no authentication mechanism and simply listens to the protocol and port (no app and stream name).**
|
You need to know, that **ffmpeg in current version has no authentication mechanism and it just listen to the protocol and port (no app and stream name).**
|
||||||
|
|
||||||
ffplayout addresses this issue by monitoring the output from ffmpeg. When the input is **rtmp** and the app or stream name differs from the configuration, it stops the ingest process. So, in a way, we have some control over which streams are accepted and which are not.
|
ffplayout catches this problem with monitoring the output from ffmpeg. When the input is **rtmp** and the app or stream name differs to the config it stops the ingest process. So in a way we have a bit control, which stream we let come in and which not.
|
||||||
|
|
||||||
In theory, you can use any [protocol](https://ffmpeg.org/ffmpeg-protocols.html) from ffmpeg that supports a **listen** mode.
|
In theory you can use every [protocol](https://ffmpeg.org/ffmpeg-protocols.html) from ffmpeg which support a **listen** mode.
|
||||||
|
@ -2,15 +2,15 @@
|
|||||||
|
|
||||||
**\* This is an experimental feature and more intended for advanced users. Use it with caution!**
|
**\* This is an experimental feature and more intended for advanced users. Use it with caution!**
|
||||||
|
|
||||||
With _ffplayout_, you can output streams with multiple audio tracks, with some limitations:
|
With _ffplayout_ you can output streams with multiple audio tracks, with some limitations:
|
||||||
* Not all formats support multiple audio tracks. For example, _flv/rtmp_ doesn't support it.
|
* Not all formats support multiple audio tracks. For example _flv/rtmp_ doesn't support it.
|
||||||
* In your output parameters, you need to set the correct mapping.
|
* In your output parameters you need to set the correct mapping.
|
||||||
|
|
||||||
ffmpeg filter usage and encoding parameters can become very complex, so it may happen that not every combination works out of the box.
|
ffmpeg filter usage and encoding parameters can become very complex, so it can happen that not every combination works out of the box.
|
||||||
|
|
||||||
To get a better idea of what works, you can examine [engine_cmd](../tests/src/engine_cmd.rs).
|
To get a better idea of what works, you can examine [engine_cmd](../tests/src/engine_cmd.rs).
|
||||||
|
|
||||||
If you are outputting a single video stream with multiple audio tracks, for example with the `srt://` protocol, you only need to set the correct `audio_tracks:` count in your config under `processing:`.
|
If you just output a single video stream with multiple audio tracks, let's say with `srt://` protocol, you only need to set in you config under `processing:` the correct `audio_tracks:` count.
|
||||||
|
|
||||||
For multiple video resolutions and multiple audio tracks, the parameters could look like:
|
For multiple video resolutions and multiple audio tracks, the parameters could look like:
|
||||||
|
|
||||||
|
@ -2,11 +2,11 @@ ffplayout supports different types of outputs, let's explain them a bit:
|
|||||||
|
|
||||||
## Stream
|
## Stream
|
||||||
|
|
||||||
The streaming output can be used for any kind of classical streaming, such as **rtmp, srt, rtp**, etc. Any streaming type supported by ffmpeg should work.
|
The streaming output can be used for ever kind of classical streaming. For example for **rtmp, srt, rtp** etc. Any streaming type supported by ffmpeg should work.
|
||||||
|
|
||||||
**Remember that you need a streaming server as a destination if you want to use this mode.**
|
**Remember that you need a streaming server as a destination if you want to use this mode.**
|
||||||
|
|
||||||
For example, you can use:
|
You can use for example:
|
||||||
|
|
||||||
- [SRS](https://github.com/ossrs/srs)
|
- [SRS](https://github.com/ossrs/srs)
|
||||||
- [OvenMediaEngine](https://www.ovenmediaengine.com/ome)
|
- [OvenMediaEngine](https://www.ovenmediaengine.com/ome)
|
||||||
@ -17,9 +17,9 @@ Of course, you can also use media platforms that support streaming input.
|
|||||||
|
|
||||||
### Multiple Outputs:
|
### Multiple Outputs:
|
||||||
|
|
||||||
ffplayout supports multiple outputs in such a way that it can send the same stream to multiple targets with different encoding settings.
|
ffplayout supports multiple outputs in a way, that it can output the same stream to multiple targets with different encoding settings.
|
||||||
|
|
||||||
For example, if you want to stream at different resolutions, you could apply these output parameters:
|
For example you want to stream different resolutions, you could apply this output parameters:
|
||||||
|
|
||||||
```YAML
|
```YAML
|
||||||
...
|
...
|
||||||
@ -58,21 +58,21 @@ For example, if you want to stream at different resolutions, you could apply the
|
|||||||
|
|
||||||
When you are using the text overlay filter, it will apply to all outputs.
|
When you are using the text overlay filter, it will apply to all outputs.
|
||||||
|
|
||||||
The same applies to HLS output.
|
The same also works for HLS output.
|
||||||
|
|
||||||
If you want to use different resolutions, you should apply them in order from largest to smallest. Use the largest resolution in the config under `processing:` and the smaller ones in `output_params:`.
|
If you want to use different resolution, you should apply them in order from biggest to smallest. Use the biggest resolution in config under `processing:` and the smaller ones in `output_params:`.
|
||||||
|
|
||||||
## Desktop
|
## Desktop
|
||||||
|
|
||||||
In desktop mode, you will get your picture on the screen. For this, you need a desktop system; theoretically, all platforms should work here. ffplayout will require **ffplay** for that.
|
In desktop mode you will get your picture on screen. For this you need a desktop system, theoretical all platforms should work here. ffplayout will need for that **ffplay**.
|
||||||
|
|
||||||
## HLS
|
## HLS
|
||||||
|
|
||||||
In this mode, you can output directly to an HLS playlist. The nice thing here is that ffplayout requires fewer resources than in streaming mode.
|
In this mode you can output directly to a hls playlist. The nice thing here is, that ffplayout need less resources then in streaming mode.
|
||||||
|
|
||||||
HLS output is currently the default, mostly because it works out of the box and doesn't need a streaming target. By default, it saves the segments to **/usr/share/ffplayout/public/live/**.
|
HLS output is currently the default, mostly because it works out of the box and don't need a streaming target. In default settings it saves the segments to **/usr/share/ffplayout/public/live/**.
|
||||||
|
|
||||||
**It is recommended to serve the HLS stream with nginx or another web server, and not with ffplayout (which is more meant for previewing).**
|
**It is recommended to serve the HLS stream with nginx or another web server, and not with ffpapi (which is more meant for previewing).**
|
||||||
|
|
||||||
**HLS multiple outputs example:**
|
**HLS multiple outputs example:**
|
||||||
|
|
||||||
@ -135,50 +135,6 @@ HLS output is currently the default, mostly because it works out of the box and
|
|||||||
|
|
||||||
The using of **-filter_complex** and *mapping* is very limited, don't use it in situations other then for splitting the outputs.
|
The using of **-filter_complex** and *mapping* is very limited, don't use it in situations other then for splitting the outputs.
|
||||||
|
|
||||||
## Tee Muxer:
|
#### Activating Output
|
||||||
|
|
||||||
The tee pseudo-muxer in FFmpeg is crucial in live streaming scenarios where a single input needs to be encoded once and then broadcast to multiple outputs in different formats or protocols. This feature significantly reduces computational overhead and improves efficiency—in my tests, it achieved a 200% reduction in CPU processing expenditure—by eliminating the need for multiple FFmpeg instances or re-encoding the same input multiple times for different outputs.
|
To use one of the outputs you need to edit the **ffplayout.yml** config, here under **out** set your **mode** and use the different **output** options.
|
||||||
|
|
||||||
**FFmpeg's Tee Pseudo-Muxer Parameter Configuration:**
|
|
||||||
|
|
||||||
The configuration of the tee pseudo-muxer in FFmpeg allows for the broadcasting of a single input to multiple outputs simultaneously, each with specific settings. This is accomplished by specifying distinct formats and protocols for each output within a single command line, thus minimizing computational load by avoiding re-encoding for each target.
|
|
||||||
|
|
||||||
### Parameters and Syntax:
|
|
||||||
|
|
||||||
```shell
|
|
||||||
-c:v libx264
|
|
||||||
-crf 23
|
|
||||||
-x264-params keyint=50:min-keyint=25:scenecut=-1
|
|
||||||
-maxrate 1300k
|
|
||||||
-bufsize 2600k
|
|
||||||
-preset faster
|
|
||||||
-tune zerolatency
|
|
||||||
-profile:v Main
|
|
||||||
-level 3.1
|
|
||||||
-c:a aac
|
|
||||||
-ar 44100
|
|
||||||
-b:a 128k
|
|
||||||
-flags +cgop
|
|
||||||
-flags +global_header
|
|
||||||
-f tee
|
|
||||||
[f=flv:onfail=ignore]rtmp://127.0.0.1:1935/798e3a9e-47b5-4cd5-8079-76a20e03fee6.stream|[f=mpegts:onfail=ignore]udp://127.0.0.1:1234?pkt_size=1316|[f=hls:hls_time=6:hls_list_size=600:hls_flags=append_list+delete_segments+omit_endlist:hls_segment_filename=/usr/share/ffplayout/public/live/stream-%d.ts]/usr/share/ffplayout/public/live/stream.m3u8
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
**1. `-f tee`**: Specifies the use of the tee pseudo-muxer, which facilitates the multiplexing of the broadcast.
|
|
||||||
|
|
||||||
**2. Use of “|” (pipe)**: The pipe symbol "|" acts as a separator between the different outputs within the tee command. Each segment separated by a pipe configures a distinct output for the broadcast.
|
|
||||||
|
|
||||||
**3. Stream Processing by the Tee**:
|
|
||||||
- **First Output**: `[f=flv:onfail=ignore]rtmp://127.0.0.1:1935/798e3a9e-47b5-4cd5-8079-76a20e03fee6.stream`
|
|
||||||
- **f=flv**: Sets the output format to FLV (Flash Video).
|
|
||||||
- **onfail=ignore**: Directs FFmpeg to continue operating even if this output fails.
|
|
||||||
|
|
||||||
- **Second Output**: `[f=mpegts:onfail=ignore]udp://127.0.0.1:1234?pkt_size=1316`
|
|
||||||
- **f=mpegts**: Sets the output format to MPEG-TS (MPEG Transport Stream).
|
|
||||||
- **udp://...**: Uses the UDP protocol to send the stream with a specified packet size (`pkt_size=1316`).
|
|
||||||
|
|
||||||
- **Third Output**: `[f=hls:hls_time=6:hls_list_size=600:hls_flags=append_list+delete_segments+omit_endlist:hls_segment_filename=/usr/share/ffplayout/public/live/stream-%d.ts]/usr/share/ffplayout/public/live/stream.m3u8`
|
|
||||||
- **f=hls**: Sets the output format to HLS (HTTP Live Streaming).
|
|
||||||
|
|
||||||
Each stream is processed by the tee pseudo-muxer, which encodes the input only once, directing it to various outputs as specified, thereby allowing for efficient and less resource-intensive operation.
|
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
## Playlist Generation Template
|
## Playlist generation template
|
||||||
|
|
||||||
It is possible to generate playlists based on templates. A template could look like:
|
It is possible to generate playlists based on templates. A template could look like:
|
||||||
|
|
||||||
|
@ -1,12 +1,12 @@
|
|||||||
### Preview Stream
|
### Preview Stream
|
||||||
|
|
||||||
When you are using the web frontend, you may wonder how to get a preview in the player. The default installation creates an HLS playlist, and the player uses this, but the HLS mode is not always utilized; instead, the stream output mode is activated.
|
When you are using the web frontend, maybe you wonder how you get a preview in the player. The default installation creates a HLS playlist and the player using this one, but most of the time the HLS mode is not used, instead the stream output mode is activated.
|
||||||
|
|
||||||
So if you stream to an external server, you have different options to get a preview stream for your player. The simplest option would be to obtain an m3u8 playlist address from your external target, such as: https://example.org/live/stream.m3u8. You can use this in the configuration section of the frontend.
|
So if you stream to a external server, you have different options to get a preview stream for you player. The simplest one would be, if you get a m3u8 playlist address from your external target, like: https://example.org/live/stream.m3u8 this you can use in the configuration section from the frontend.
|
||||||
|
|
||||||
Another option (which has not been tested) is to add an HLS output option to your streaming parameters.
|
Another option would be (which is not testet), to add a HLS output option to your streaming parameters.
|
||||||
|
|
||||||
The next option is to install an RTMP server locally and create your preview stream there. In the following lines, this is described in more detail.
|
The next option can be, that you install a rtmp server locally and create here your preview stream. In the following lines this is described in more detail.
|
||||||
|
|
||||||
The ffplayout engine has no special preview config parameters, but you can add your settings to the **output_param**, like:
|
The ffplayout engine has no special preview config parameters, but you can add your settings to the **output_param**, like:
|
||||||
|
|
||||||
@ -29,11 +29,11 @@ The ffplayout engine has no special preview config parameters, but you can add y
|
|||||||
...
|
...
|
||||||
```
|
```
|
||||||
|
|
||||||
In this documentation, we assume that you are using [SRS](https://github.com/ossrs/srs) at least for the preview stream. The most stable solution is previewing over HLS, but it is also possible to use [HTTP-FLV](https://github.com/ossrs/srs/wiki/v4_EN_DeliveryHttpStream) for lower latency.
|
In this documentation we suspect, that you are using [ffplayout-frontend](https://github.com/ffplayout/ffplayout-frontend) and that you using [SRS](https://github.com/ossrs/srs) at least for the preview stream. The most stable solution is previewing over HLS, but it is also possible to use [HTTP-FLV](https://github.com/ossrs/srs/wiki/v4_EN_DeliveryHttpStream) for less latency.
|
||||||
|
|
||||||
To get this working, we need to follow some steps.
|
To get this working we have to follow some steps.
|
||||||
|
|
||||||
#### The first step is to compile and install SRS:
|
#### First step is to compile and install SRS:
|
||||||
|
|
||||||
```BASH
|
```BASH
|
||||||
# install some tool for compiling
|
# install some tool for compiling
|
||||||
@ -58,7 +58,7 @@ make install
|
|||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Now we need a systemd service to start SRS automatically. Create the file:
|
Now we need a systemd service, to startup SRS automatically. Create the file:
|
||||||
|
|
||||||
**/etc/systemd/system/srs.service**
|
**/etc/systemd/system/srs.service**
|
||||||
|
|
||||||
@ -134,11 +134,11 @@ vhost __defaultVhost__ {
|
|||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Now you can enable and start SRS with: `systemctl enable --now srs` and check if it is running: `systemctl status srs`.
|
Now you can enable and start SRS with: `systemctl enable --now srs` and check if it is running: `systemctl status srs`
|
||||||
|
|
||||||
#### Configure Nginx
|
#### Configure Nginx
|
||||||
|
|
||||||
We assume that you have already installed Nginx and are using it for the frontend. Open the frontend config **/etc/nginx/sites-enabled/ffplayout.conf** and add a new location to it:
|
We assume that you have already installed nginx and you are using it already for the frontend. So open the frontend config **/etc/nginx/sites-enabled/ffplayout.conf** and add a new location to it:
|
||||||
|
|
||||||
```NGINX
|
```NGINX
|
||||||
location /live/stream.flv {
|
location /live/stream.flv {
|
||||||
@ -192,10 +192,10 @@ server {
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
Of course, in production, you should have an HTTPS directive as well, but this step is up to you.
|
Of course in production you should have a HTTPS directive to, but this step is up to you.
|
||||||
|
|
||||||
Restart Nginx.
|
Restart Nginx.
|
||||||
|
|
||||||
You can (re)start ffplayout, and when you have set everything up correctly, it should run without errors.
|
You can (re)start ffplayout and when you setup everything correct it should run without errors.
|
||||||
|
|
||||||
You can now go to your frontend configuration and change the `player_url` to: `http://[domain or IP]/live/stream.flv` or `http://[domain or IP]/live/stream.m3u8`. Save and reload the page. When you go to the player tab, you should see the preview video.
|
You can go now in your frontend configuration and change the `player_url` to: `http://[domain or IP]/live/stream.flv` or `http://[domain or IP]/live/stream.m3u8`, save and reload the page. When you go now to the player tap you should see the preview video.
|
||||||
|
@ -1,6 +1,5 @@
|
|||||||
### Video from URL
|
### Video from URL
|
||||||
|
Videos from a URL are videos that you can watch directly in the browser or download, for example:
|
||||||
Videos from a URL are videos that you can watch directly in your browser or download. For example:
|
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
@ -11,8 +10,8 @@ Videos from a URL are videos that you can watch directly in your browser or down
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
This should work in general because most of the time it has duration information and is faster to play than a real live stream source. Avoid seeking, as it can take too much time.
|
This should work in general, because most time it have a duration information and it is faster playable then a real live stream source. Avoid seeking because it can take to much time.
|
||||||
|
|
||||||
**Live streams as input in playlists, such as RTMP, are not supported.**
|
**Live streams as input in playlist, like rtmp is not supported.**
|
||||||
|
|
||||||
Be careful with this; it's better to test it multiple times!
|
Be careful with it, better test it multiple times!
|
||||||
|
@ -1,10 +1,10 @@
|
|||||||
### Stream Copy
|
### Stream Copy
|
||||||
|
|
||||||
ffplayout has supported a stream copy mode. A separate copy mode for video and audio is possible. This mode uses less CPU and RAM but has some drawbacks:
|
ffplayout supports a stream copy mode since v0.20.0. A separate copy mode for video and audio is possible. This mode uses less CPU and RAM, but has some drawbacks:
|
||||||
|
|
||||||
- All files must have exactly the same resolution, framerate, color depth, audio channels, and kHz.
|
- All files must have exactly the same resolution, framerate, color depth, audio channels and kHz.
|
||||||
- All files must use the same codecs and settings.
|
- All files must use the same codecs and settings.
|
||||||
- The video and audio lines of a file must be the same length.
|
- The video and audio lines of a file must be the same length.
|
||||||
- The codecs and A/V settings must be supported by MPEG-TS and the output destination.
|
- The codecs and A/V settings must be supported by mpegts and the output destination.
|
||||||
|
|
||||||
**This mode is experimental and will not have the same stability as the stream mode.**
|
**This mode is experimental and will not have the same stability as the stream mode.**
|
||||||
|
@ -1,60 +0,0 @@
|
|||||||
use log::*;
|
|
||||||
use std::io::Write;
|
|
||||||
|
|
||||||
use flexi_logger::writers::{FileLogWriter, LogWriter};
|
|
||||||
use flexi_logger::{Age, Cleanup, Criterion, DeferredNow, FileSpec, Logger, Naming};
|
|
||||||
|
|
||||||
pub fn file_logger() -> Box<dyn LogWriter> {
|
|
||||||
Box::new(
|
|
||||||
FileLogWriter::builder(
|
|
||||||
FileSpec::default()
|
|
||||||
.suppress_timestamp()
|
|
||||||
.directory("./logs")
|
|
||||||
.discriminant("1")
|
|
||||||
.basename("ffplayout"),
|
|
||||||
)
|
|
||||||
.append()
|
|
||||||
.format(file_formatter)
|
|
||||||
.rotate(
|
|
||||||
Criterion::Age(Age::Day),
|
|
||||||
Naming::TimestampsCustomFormat {
|
|
||||||
current_infix: Some(""),
|
|
||||||
format: "%Y-%m-%d",
|
|
||||||
},
|
|
||||||
Cleanup::KeepLogFiles(4),
|
|
||||||
)
|
|
||||||
.print_message()
|
|
||||||
.try_build()
|
|
||||||
.unwrap(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn file_formatter(
|
|
||||||
w: &mut dyn Write,
|
|
||||||
now: &mut DeferredNow,
|
|
||||||
record: &Record,
|
|
||||||
) -> std::io::Result<()> {
|
|
||||||
write!(
|
|
||||||
w,
|
|
||||||
"[{}] [{:>5}] {}",
|
|
||||||
now.now().format("%Y-%m-%d %H:%M:%S%.6f"),
|
|
||||||
record.level(),
|
|
||||||
record.args()
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn main() {
|
|
||||||
Logger::try_with_str("WARN")
|
|
||||||
.expect("LogSpecification String has errors")
|
|
||||||
.print_message()
|
|
||||||
.log_to_stderr()
|
|
||||||
.add_writer("Alert", file_logger())
|
|
||||||
.start()
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
error!(target : "{Alert,_Default}", "This is error message");
|
|
||||||
warn!(target : "{Alert,_Default}", "This is a warning");
|
|
||||||
info!(target : "{Alert,_Default}", "This is an info message");
|
|
||||||
debug!(target : "{Alert,_Default}", "This is an debug message");
|
|
||||||
trace!(target : "{Alert,_Default}", "This is an trace message");
|
|
||||||
}
|
|
@ -1,85 +0,0 @@
|
|||||||
use flexi_logger::writers::{FileLogWriter, LogWriter};
|
|
||||||
use flexi_logger::{Age, Cleanup, Criterion, DeferredNow, FileSpec, Naming, Record};
|
|
||||||
use log::{debug, error, info, kv::Value, trace, warn};
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::io;
|
|
||||||
use std::sync::{Arc, Mutex};
|
|
||||||
|
|
||||||
struct MultiFileLogger {
|
|
||||||
writers: Arc<Mutex<HashMap<String, Arc<Mutex<FileLogWriter>>>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl MultiFileLogger {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
MultiFileLogger {
|
|
||||||
writers: Arc::new(Mutex::new(HashMap::new())),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_writer(&self, channel: &str) -> io::Result<Arc<Mutex<FileLogWriter>>> {
|
|
||||||
let mut writers = self.writers.lock().unwrap();
|
|
||||||
if !writers.contains_key(channel) {
|
|
||||||
let writer = FileLogWriter::builder(
|
|
||||||
FileSpec::default()
|
|
||||||
.suppress_timestamp()
|
|
||||||
.basename("ffplayout"),
|
|
||||||
)
|
|
||||||
.append()
|
|
||||||
.rotate(
|
|
||||||
Criterion::Age(Age::Day),
|
|
||||||
Naming::TimestampsCustomFormat {
|
|
||||||
current_infix: Some(""),
|
|
||||||
format: "%Y-%m-%d",
|
|
||||||
},
|
|
||||||
Cleanup::KeepLogFiles(7),
|
|
||||||
)
|
|
||||||
.print_message()
|
|
||||||
.try_build()
|
|
||||||
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
|
|
||||||
writers.insert(channel.to_string(), Arc::new(Mutex::new(writer)));
|
|
||||||
}
|
|
||||||
Ok(writers.get(channel).unwrap().clone())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl LogWriter for MultiFileLogger {
|
|
||||||
fn write(&self, now: &mut DeferredNow, record: &Record) -> io::Result<()> {
|
|
||||||
let channel = record
|
|
||||||
.key_values()
|
|
||||||
.get("channel".into())
|
|
||||||
.unwrap_or(Value::null())
|
|
||||||
.to_string();
|
|
||||||
let writer = self.get_writer(&channel);
|
|
||||||
let w = writer?.lock().unwrap().write(now, record);
|
|
||||||
|
|
||||||
w
|
|
||||||
}
|
|
||||||
|
|
||||||
fn flush(&self) -> io::Result<()> {
|
|
||||||
let writers = self.writers.lock().unwrap();
|
|
||||||
for writer in writers.values() {
|
|
||||||
writer.lock().unwrap().flush()?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn main() {
    // Demo: one MultiFileLogger shared through flexi_logger's writer registry.
    let logger = MultiFileLogger::new();

    flexi_logger::Logger::try_with_str("trace")
        .expect("LogSpecification String has errors")
        .print_message()
        // Registered under the name "file"; selected per call via
        // `target: "{file}"` in the log macros below.
        .add_writer("file", Box::new(logger))
        .log_to_stderr()
        .start()
        .unwrap();

    // Records with `target: "{file}"` go to the per-channel files,
    // keyed by the `channel` key-value; plain records go to stderr.
    trace!(target: "{file}", channel = 1; "This is a trace message for file1");
    trace!("This is a trace message for console");
    debug!(target: "{file}", channel = 2; "This is a debug message for file2");
    info!(target:"{file}", channel = 2; "This is an info message for file2");
    warn!(target: "{file}", channel = 1; "This is a warning for file1");
    error!(target: "{file}", channel = 2; "This is an error message for file2");
    info!("This is a info message for console");
}
|
|
@ -1,552 +0,0 @@
|
|||||||
use argon2::{
|
|
||||||
password_hash::{rand_core::OsRng, SaltString},
|
|
||||||
Argon2, PasswordHasher,
|
|
||||||
};
|
|
||||||
|
|
||||||
use rand::{distributions::Alphanumeric, Rng};
|
|
||||||
use sqlx::{sqlite::SqliteQueryResult, Pool, Row, Sqlite};
|
|
||||||
use tokio::task;
|
|
||||||
|
|
||||||
use super::models::{AdvancedConfiguration, Configuration};
|
|
||||||
use crate::db::models::{Channel, GlobalSettings, Role, TextPreset, User};
|
|
||||||
use crate::utils::{
|
|
||||||
advanced_config::AdvancedConfig, config::PlayoutConfig, is_running_in_container,
|
|
||||||
local_utc_offset,
|
|
||||||
};
|
|
||||||
|
|
||||||
/// Run pending SQL migrations and, on a fresh database, seed the
/// singleton `global` row with a random 80-char secret and the
/// container/shared flag.
pub async fn db_migrate(conn: &Pool<Sqlite>) -> Result<(), Box<dyn std::error::Error>> {
    sqlx::migrate!("../migrations").run(conn).await?;

    // A failing select means the global row does not exist yet.
    if select_global(conn).await.is_err() {
        // Random alphanumeric secret, e.g. for token signing.
        let secret: String = rand::thread_rng()
            .sample_iter(&Alphanumeric)
            .take(80)
            .map(char::from)
            .collect();
        let shared = is_running_in_container().await;

        // The trigger guarantees `global` can never hold more than one row.
        let query = "CREATE TRIGGER global_row_count
        BEFORE INSERT ON global
        WHEN (SELECT COUNT(*) FROM global) >= 1
        BEGIN
            SELECT RAISE(FAIL, 'Database is already initialized!');
        END;
        INSERT INTO global(secret, shared) VALUES($1, $2);";

        sqlx::query(query)
            .bind(secret)
            .bind(shared)
            .execute(conn)
            .await?;
    }

    Ok(())
}
|
|
||||||
|
|
||||||
/// Fetch the singleton settings row (`id = 1`) from the `global` table.
pub async fn select_global(conn: &Pool<Sqlite>) -> Result<GlobalSettings, sqlx::Error> {
    sqlx::query_as(
        "SELECT id, secret, logs, playlists, public, storage, shared, mail_smtp, mail_user, mail_password, mail_starttls FROM global WHERE id = 1",
    )
    .fetch_one(conn)
    .await
}
|
|
||||||
|
|
||||||
/// Persist the editable global settings; `id` and `secret` are never
/// touched, the statement always targets the fixed row `id = 1`.
pub async fn update_global(
    conn: &Pool<Sqlite>,
    global: GlobalSettings,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = "UPDATE global SET logs = $2, playlists = $3, public = $4, storage = $5,
        mail_smtp = $6, mail_user = $7, mail_password = $8, mail_starttls = $9 WHERE id = 1";

    // NOTE(review): `global.id` is bound as $1 but the statement never
    // references $1; the bind only keeps the numbering of the remaining
    // placeholders aligned.
    sqlx::query(query)
        .bind(global.id)
        .bind(global.logs)
        .bind(global.playlists)
        .bind(global.public)
        .bind(global.storage)
        .bind(global.mail_smtp)
        .bind(global.mail_user)
        .bind(global.mail_password)
        .bind(global.mail_starttls)
        .execute(conn)
        .await
}
|
|
||||||
|
|
||||||
/// Fetch one channel by id and stamp it with the server's UTC offset.
pub async fn select_channel(conn: &Pool<Sqlite>, id: &i32) -> Result<Channel, sqlx::Error> {
    let mut channel: Channel = sqlx::query_as("SELECT * FROM channels WHERE id = $1")
        .bind(id)
        .fetch_one(conn)
        .await?;
    channel.utc_offset = local_utc_offset();

    Ok(channel)
}
|
|
||||||
|
|
||||||
/// List channels visible to a user, or all channels when `user_id` is
/// `None`. Every returned channel gets the server's current UTC offset.
pub async fn select_related_channels(
    conn: &Pool<Sqlite>,
    user_id: Option<i32>,
) -> Result<Vec<Channel>, sqlx::Error> {
    let query = match user_id {
        // `id` is an i32, so this `format!` cannot inject arbitrary SQL.
        Some(id) => format!(
            "SELECT c.id, c.name, c.preview_url, c.extra_extensions, c.active, c.public, c.playlists, c.storage, c.last_date, c.time_shift FROM channels c
            left join user_channels uc on uc.channel_id = c.id
            left join user u on u.id = uc.user_id
            WHERE u.id = {id} ORDER BY c.id ASC;"
        ),
        None => "SELECT * FROM channels ORDER BY id ASC;".to_string(),
    };

    let mut results: Vec<Channel> = sqlx::query_as(&query).fetch_all(conn).await?;

    for result in results.iter_mut() {
        result.utc_offset = local_utc_offset();
    }

    Ok(results)
}
|
|
||||||
|
|
||||||
pub async fn delete_user_channel(
|
|
||||||
conn: &Pool<Sqlite>,
|
|
||||||
user_id: i32,
|
|
||||||
channel_id: i32,
|
|
||||||
) -> Result<SqliteQueryResult, sqlx::Error> {
|
|
||||||
let query = "DELETE FROM user_channels WHERE user_id = $1 AND channel_id = $2";
|
|
||||||
|
|
||||||
sqlx::query(query)
|
|
||||||
.bind(user_id)
|
|
||||||
.bind(channel_id)
|
|
||||||
.execute(conn)
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn update_channel(
|
|
||||||
conn: &Pool<Sqlite>,
|
|
||||||
id: i32,
|
|
||||||
channel: Channel,
|
|
||||||
) -> Result<SqliteQueryResult, sqlx::Error> {
|
|
||||||
let query =
|
|
||||||
"UPDATE channels SET name = $2, preview_url = $3, extra_extensions = $4, public = $5, playlists = $6, storage = $7 WHERE id = $1";
|
|
||||||
|
|
||||||
sqlx::query(query)
|
|
||||||
.bind(id)
|
|
||||||
.bind(channel.name)
|
|
||||||
.bind(channel.preview_url)
|
|
||||||
.bind(channel.extra_extensions)
|
|
||||||
.bind(channel.public)
|
|
||||||
.bind(channel.playlists)
|
|
||||||
.bind(channel.storage)
|
|
||||||
.execute(conn)
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn update_stat(
|
|
||||||
conn: &Pool<Sqlite>,
|
|
||||||
id: i32,
|
|
||||||
last_date: String,
|
|
||||||
time_shift: f64,
|
|
||||||
) -> Result<SqliteQueryResult, sqlx::Error> {
|
|
||||||
let query = "UPDATE channels SET last_date = $2, time_shift = $3 WHERE id = $1";
|
|
||||||
|
|
||||||
sqlx::query(query)
|
|
||||||
.bind(id)
|
|
||||||
.bind(last_date)
|
|
||||||
.bind(time_shift)
|
|
||||||
.execute(conn)
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn update_player(
|
|
||||||
conn: &Pool<Sqlite>,
|
|
||||||
id: i32,
|
|
||||||
active: bool,
|
|
||||||
) -> Result<SqliteQueryResult, sqlx::Error> {
|
|
||||||
let query = "UPDATE channels SET active = $2 WHERE id = $1";
|
|
||||||
|
|
||||||
sqlx::query(query).bind(id).bind(active).execute(conn).await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Insert a new channel and return the stored row (with its generated id
/// and column defaults applied).
pub async fn insert_channel(conn: &Pool<Sqlite>, channel: Channel) -> Result<Channel, sqlx::Error> {
    let inserted = sqlx::query(
        "INSERT INTO channels (name, preview_url, extra_extensions, public, playlists, storage) VALUES($1, $2, $3, $4, $5, $6)",
    )
    .bind(channel.name)
    .bind(channel.preview_url)
    .bind(channel.extra_extensions)
    .bind(channel.public)
    .bind(channel.playlists)
    .bind(channel.storage)
    .execute(conn)
    .await?;

    sqlx::query_as("SELECT * FROM channels WHERE id = $1")
        .bind(inserted.last_insert_rowid())
        .fetch_one(conn)
        .await
}
|
|
||||||
|
|
||||||
pub async fn delete_channel(
|
|
||||||
conn: &Pool<Sqlite>,
|
|
||||||
id: &i32,
|
|
||||||
) -> Result<SqliteQueryResult, sqlx::Error> {
|
|
||||||
let query = "DELETE FROM channels WHERE id = $1";
|
|
||||||
|
|
||||||
sqlx::query(query).bind(id).execute(conn).await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Read the last auto-assigned channel id from SQLite's internal
/// `sqlite_sequence` table.
pub async fn select_last_channel(conn: &Pool<Sqlite>) -> Result<i32, sqlx::Error> {
    // The sequence row is named after the table, which is `channels`
    // (plural) in every other query of this module; the previous
    // lookup for `channel` could never match a row.
    let query = "select seq from sqlite_sequence WHERE name = 'channels';";

    sqlx::query_scalar(query).fetch_one(conn).await
}
|
|
||||||
|
|
||||||
pub async fn select_configuration(
|
|
||||||
conn: &Pool<Sqlite>,
|
|
||||||
channel: i32,
|
|
||||||
) -> Result<Configuration, sqlx::Error> {
|
|
||||||
let query = "SELECT * FROM configurations WHERE channel_id = $1";
|
|
||||||
|
|
||||||
sqlx::query_as(query).bind(channel).fetch_one(conn).await
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn insert_configuration(
|
|
||||||
conn: &Pool<Sqlite>,
|
|
||||||
channel_id: i32,
|
|
||||||
output_param: String,
|
|
||||||
) -> Result<SqliteQueryResult, sqlx::Error> {
|
|
||||||
let query = "INSERT INTO configurations (channel_id, output_param) VALUES($1, $2)";
|
|
||||||
|
|
||||||
sqlx::query(query)
|
|
||||||
.bind(channel_id)
|
|
||||||
.bind(output_param)
|
|
||||||
.execute(conn)
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Flatten a `PlayoutConfig` into the `configurations` row identified by
/// `id` (row id, not channel id). The bind order must exactly match the
/// placeholder numbering $2..$48 in the statement.
pub async fn update_configuration(
    conn: &Pool<Sqlite>,
    id: i32,
    config: PlayoutConfig,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = "UPDATE configurations SET general_stop_threshold = $2, mail_subject = $3, mail_recipient = $4, mail_level = $5, mail_interval = $6, logging_ffmpeg_level = $7, logging_ingest_level = $8, logging_detect_silence = $9, logging_ignore = $10, processing_mode = $11, processing_audio_only = $12, processing_copy_audio = $13, processing_copy_video = $14, processing_width = $15, processing_height = $16, processing_aspect = $17, processing_fps = $18, processing_add_logo = $19, processing_logo = $20, processing_logo_scale = $21, processing_logo_opacity = $22, processing_logo_position = $23, processing_audio_tracks = $24, processing_audio_track_index = $25, processing_audio_channels = $26, processing_volume = $27, processing_filter = $28, processing_vtt_enable = $29, processing_vtt_dummy = $30, ingest_enable = $31, ingest_param = $32, ingest_filter = $33, playlist_day_start = $34, playlist_length = $35, playlist_infinit = $36, storage_filler = $37, storage_extensions = $38, storage_shuffle = $39, text_add = $40, text_from_filename = $41, text_font = $42, text_style = $43, text_regex = $44, task_enable = $45, task_path = $46, output_mode = $47, output_param = $48 WHERE id = $1";

    // List-valued fields are stored as `;`-joined strings; enums and
    // paths are stored via their string representation.
    sqlx::query(query)
        .bind(id)
        .bind(config.general.stop_threshold)
        .bind(config.mail.subject)
        .bind(config.mail.recipient)
        .bind(config.mail.mail_level.as_str())
        .bind(config.mail.interval)
        .bind(config.logging.ffmpeg_level)
        .bind(config.logging.ingest_level)
        .bind(config.logging.detect_silence)
        .bind(config.logging.ignore_lines.join(";"))
        .bind(config.processing.mode.to_string())
        .bind(config.processing.audio_only)
        .bind(config.processing.copy_audio)
        .bind(config.processing.copy_video)
        .bind(config.processing.width)
        .bind(config.processing.height)
        .bind(config.processing.aspect)
        .bind(config.processing.fps)
        .bind(config.processing.add_logo)
        .bind(config.processing.logo)
        .bind(config.processing.logo_scale)
        .bind(config.processing.logo_opacity)
        .bind(config.processing.logo_position)
        .bind(config.processing.audio_tracks)
        .bind(config.processing.audio_track_index)
        .bind(config.processing.audio_channels)
        .bind(config.processing.volume)
        .bind(config.processing.custom_filter)
        .bind(config.processing.vtt_enable)
        .bind(config.processing.vtt_dummy)
        .bind(config.ingest.enable)
        .bind(config.ingest.input_param)
        .bind(config.ingest.custom_filter)
        .bind(config.playlist.day_start)
        .bind(config.playlist.length)
        .bind(config.playlist.infinit)
        .bind(config.storage.filler)
        .bind(config.storage.extensions.join(";"))
        .bind(config.storage.shuffle)
        .bind(config.text.add_text)
        .bind(config.text.text_from_filename)
        .bind(config.text.font)
        .bind(config.text.style)
        .bind(config.text.regex)
        .bind(config.task.enable)
        .bind(config.task.path.to_string_lossy().to_string())
        .bind(config.output.mode.to_string())
        .bind(config.output.output_param)
        .execute(conn)
        .await
}
|
|
||||||
|
|
||||||
pub async fn insert_advanced_configuration(
|
|
||||||
conn: &Pool<Sqlite>,
|
|
||||||
channel_id: i32,
|
|
||||||
) -> Result<SqliteQueryResult, sqlx::Error> {
|
|
||||||
let query = "INSERT INTO advanced_configurations (channel_id) VALUES($1)";
|
|
||||||
|
|
||||||
sqlx::query(query).bind(channel_id).execute(conn).await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Flatten an `AdvancedConfig` into the `advanced_configurations` row of
/// one channel. The bind order must exactly match placeholders $2..$27.
pub async fn update_advanced_configuration(
    conn: &Pool<Sqlite>,
    channel_id: i32,
    config: AdvancedConfig,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = "UPDATE advanced_configurations SET decoder_input_param = $2, decoder_output_param = $3, encoder_input_param = $4, ingest_input_param = $5, filter_deinterlace = $6, filter_pad_scale_w = $7, filter_pad_scale_h = $8, filter_pad_video = $9, filter_fps = $10, filter_scale = $11, filter_set_dar = $12, filter_fade_in = $13, filter_fade_out = $14, filter_overlay_logo_scale = $15, filter_overlay_logo_fade_in = $16, filter_overlay_logo_fade_out = $17, filter_overlay_logo = $18, filter_tpad = $19, filter_drawtext_from_file = $20, filter_drawtext_from_zmq = $21, filter_aevalsrc = $22, filter_afade_in = $23, filter_afade_out = $24, filter_apad = $25, filter_volume = $26, filter_split = $27 WHERE channel_id = $1";

    sqlx::query(query)
        .bind(channel_id)
        .bind(config.decoder.input_param)
        .bind(config.decoder.output_param)
        .bind(config.encoder.input_param)
        .bind(config.ingest.input_param)
        .bind(config.filter.deinterlace)
        .bind(config.filter.pad_scale_w)
        .bind(config.filter.pad_scale_h)
        .bind(config.filter.pad_video)
        .bind(config.filter.fps)
        .bind(config.filter.scale)
        .bind(config.filter.set_dar)
        .bind(config.filter.fade_in)
        .bind(config.filter.fade_out)
        .bind(config.filter.overlay_logo_scale)
        .bind(config.filter.overlay_logo_fade_in)
        .bind(config.filter.overlay_logo_fade_out)
        .bind(config.filter.overlay_logo)
        .bind(config.filter.tpad)
        .bind(config.filter.drawtext_from_file)
        .bind(config.filter.drawtext_from_zmq)
        .bind(config.filter.aevalsrc)
        .bind(config.filter.afade_in)
        .bind(config.filter.afade_out)
        .bind(config.filter.apad)
        .bind(config.filter.volume)
        .bind(config.filter.split)
        .execute(conn)
        .await
}
|
|
||||||
|
|
||||||
pub async fn select_advanced_configuration(
|
|
||||||
conn: &Pool<Sqlite>,
|
|
||||||
channel: i32,
|
|
||||||
) -> Result<AdvancedConfiguration, sqlx::Error> {
|
|
||||||
let query = "SELECT * FROM advanced_configurations WHERE channel_id = $1";
|
|
||||||
|
|
||||||
sqlx::query_as(query).bind(channel).fetch_one(conn).await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Resolve a role id to its `Role` variant via the `roles` table.
pub async fn select_role(conn: &Pool<Sqlite>, id: &i32) -> Result<Role, sqlx::Error> {
    sqlx::query_as("SELECT name FROM roles WHERE id = $1")
        .bind(id)
        .fetch_one(conn)
        .await
}
|
|
||||||
|
|
||||||
/// Fetch a user by username for authentication; unlike `select_user`
/// this variant includes the password hash. The channel ids are
/// aggregated into one comma-separated string by `group_concat`.
pub async fn select_login(conn: &Pool<Sqlite>, user: &str) -> Result<User, sqlx::Error> {
    let query =
        "SELECT u.id, u.mail, u.username, u.password, u.role_id, group_concat(uc.channel_id, ',') as channel_ids FROM user u
        left join user_channels uc on uc.user_id = u.id
        WHERE u.username = $1";

    sqlx::query_as(query).bind(user).fetch_one(conn).await
}
|
|
||||||
|
|
||||||
/// Fetch a user by id, without the password hash. The channel ids are
/// aggregated into one comma-separated string by `group_concat`.
pub async fn select_user(conn: &Pool<Sqlite>, id: i32) -> Result<User, sqlx::Error> {
    let query = "SELECT u.id, u.mail, u.username, u.role_id, group_concat(uc.channel_id, ',') as channel_ids FROM user u
        left join user_channels uc on uc.user_id = u.id
        WHERE u.id = $1";

    sqlx::query_as(query).bind(id).fetch_one(conn).await
}
|
|
||||||
|
|
||||||
/// List every user with the global-admin role (`role_id = 1`),
/// without password hashes.
pub async fn select_global_admins(conn: &Pool<Sqlite>) -> Result<Vec<User>, sqlx::Error> {
    let query = "SELECT u.id, u.mail, u.username, u.role_id, group_concat(uc.channel_id, ',') as channel_ids FROM user u
        left join user_channels uc on uc.user_id = u.id
        WHERE u.role_id = 1";

    sqlx::query_as(query).fetch_all(conn).await
}
|
|
||||||
|
|
||||||
/// List all users with only `id` and `username` populated; the
/// remaining `User` fields come from `FromRow` defaults.
pub async fn select_users(conn: &Pool<Sqlite>) -> Result<Vec<User>, sqlx::Error> {
    sqlx::query_as("SELECT id, username FROM user")
        .fetch_all(conn)
        .await
}
|
|
||||||
|
|
||||||
pub async fn insert_user(conn: &Pool<Sqlite>, user: User) -> Result<(), sqlx::Error> {
|
|
||||||
let password_hash = task::spawn_blocking(move || {
|
|
||||||
let salt = SaltString::generate(&mut OsRng);
|
|
||||||
let hash = Argon2::default()
|
|
||||||
.hash_password(user.password.clone().as_bytes(), &salt)
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
hash.to_string()
|
|
||||||
})
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let query =
|
|
||||||
"INSERT INTO user (mail, username, password, role_id) VALUES($1, $2, $3, $4) RETURNING id";
|
|
||||||
|
|
||||||
let user_id: i32 = sqlx::query(query)
|
|
||||||
.bind(user.mail)
|
|
||||||
.bind(user.username)
|
|
||||||
.bind(password_hash)
|
|
||||||
.bind(user.role_id)
|
|
||||||
.fetch_one(conn)
|
|
||||||
.await?
|
|
||||||
.get("id");
|
|
||||||
|
|
||||||
if let Some(channel_ids) = user.channel_ids {
|
|
||||||
insert_user_channel(conn, user_id, channel_ids).await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Upsert a user by username: insert a new row or, on a username
/// conflict, overwrite mail, password and role. Channel links are
/// added afterwards when `channel_ids` is present.
pub async fn insert_or_update_user(conn: &Pool<Sqlite>, user: User) -> Result<(), sqlx::Error> {
    // Argon2 hashing is CPU heavy, so it runs on the blocking pool.
    let password_hash = task::spawn_blocking(move || {
        let salt = SaltString::generate(&mut OsRng);
        let hash = Argon2::default()
            .hash_password(user.password.clone().as_bytes(), &salt)
            .unwrap();

        hash.to_string()
    })
    .await
    .unwrap();

    let query = "INSERT INTO user (mail, username, password, role_id) VALUES($1, $2, $3, $4)
        ON CONFLICT(username) DO UPDATE SET
        mail = excluded.mail, username = excluded.username, password = excluded.password, role_id = excluded.role_id
        RETURNING id";

    let user_id: i32 = sqlx::query(query)
        .bind(user.mail)
        .bind(user.username)
        .bind(password_hash)
        .bind(user.role_id)
        .fetch_one(conn)
        .await?
        .get("id");

    if let Some(channel_ids) = user.channel_ids {
        insert_user_channel(conn, user_id, channel_ids).await?;
    }

    Ok(())
}
|
|
||||||
|
|
||||||
/// Apply a pre-built `SET` clause to one user row.
///
/// NOTE(review): `fields` is interpolated directly into the SQL string.
/// Callers must only pass trusted, internally generated fragments —
/// user-controlled input here would be an SQL injection vector.
pub async fn update_user(
    conn: &Pool<Sqlite>,
    id: i32,
    fields: String,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = format!("UPDATE user SET {fields} WHERE id = $1");

    sqlx::query(&query).bind(id).execute(conn).await
}
|
|
||||||
|
|
||||||
pub async fn insert_user_channel(
|
|
||||||
conn: &Pool<Sqlite>,
|
|
||||||
user_id: i32,
|
|
||||||
channel_ids: Vec<i32>,
|
|
||||||
) -> Result<(), sqlx::Error> {
|
|
||||||
for channel in &channel_ids {
|
|
||||||
let query = "INSERT OR IGNORE INTO user_channels (channel_id, user_id) VALUES ($1, $2);";
|
|
||||||
|
|
||||||
sqlx::query(query)
|
|
||||||
.bind(channel)
|
|
||||||
.bind(user_id)
|
|
||||||
.execute(conn)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Delete a user row by id.
pub async fn delete_user(conn: &Pool<Sqlite>, id: i32) -> Result<SqliteQueryResult, sqlx::Error> {
    sqlx::query("DELETE FROM user WHERE id = $1;")
        .bind(id)
        .execute(conn)
        .await
}
|
|
||||||
|
|
||||||
/// List all text presets belonging to one channel.
pub async fn select_presets(conn: &Pool<Sqlite>, id: i32) -> Result<Vec<TextPreset>, sqlx::Error> {
    sqlx::query_as("SELECT * FROM presets WHERE channel_id = $1")
        .bind(id)
        .fetch_all(conn)
        .await
}
|
|
||||||
|
|
||||||
/// Overwrite all editable fields of one text preset.
///
/// Unlike the other update functions in this module, the row id is the
/// LAST placeholder ($12), so `id` is bound after the preset fields.
pub async fn update_preset(
    conn: &Pool<Sqlite>,
    id: &i32,
    preset: TextPreset,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query =
        "UPDATE presets SET name = $1, text = $2, x = $3, y = $4, fontsize = $5, line_spacing = $6,
        fontcolor = $7, alpha = $8, box = $9, boxcolor = $10, boxborderw = $11 WHERE id = $12";

    sqlx::query(query)
        .bind(preset.name)
        .bind(preset.text)
        .bind(preset.x)
        .bind(preset.y)
        .bind(preset.fontsize)
        .bind(preset.line_spacing)
        .bind(preset.fontcolor)
        .bind(preset.alpha)
        // `box` is a reserved word in Rust, hence the raw identifier.
        .bind(preset.r#box)
        .bind(preset.boxcolor)
        .bind(preset.boxborderw)
        .bind(id)
        .execute(conn)
        .await
}
|
|
||||||
|
|
||||||
/// Store a new text preset for the channel given in `preset.channel_id`.
pub async fn insert_preset(
    conn: &Pool<Sqlite>,
    preset: TextPreset,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query =
        "INSERT INTO presets (channel_id, name, text, x, y, fontsize, line_spacing, fontcolor, alpha, box, boxcolor, boxborderw)
        VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)";

    sqlx::query(query)
        .bind(preset.channel_id)
        .bind(preset.name)
        .bind(preset.text)
        .bind(preset.x)
        .bind(preset.y)
        .bind(preset.fontsize)
        .bind(preset.line_spacing)
        .bind(preset.fontcolor)
        .bind(preset.alpha)
        // `box` is a reserved word in Rust, hence the raw identifier.
        .bind(preset.r#box)
        .bind(preset.boxcolor)
        .bind(preset.boxborderw)
        .execute(conn)
        .await
}
|
|
||||||
|
|
||||||
/// Seed a freshly created channel with the four default text presets
/// (default banner, empty text, fading bottom text, scrolling text).
/// The `alpha` column of the fade/scroll presets holds ffmpeg drawtext
/// timeline expressions, not plain numbers.
pub async fn new_channel_presets(
    conn: &Pool<Sqlite>,
    channel_id: i32,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = "INSERT INTO presets (name, text, x, y, fontsize, line_spacing, fontcolor, box, boxcolor, boxborderw, alpha, channel_id)
        VALUES ('Default', 'Welcome to ffplayout messenger!', '(w-text_w)/2', '(h-text_h)/2', '24', '4', '#ffffff@0xff', '0', '#000000@0x80', '4', '1.0', $1),
        ('Empty Text', '', '0', '0', '24', '4', '#000000', '0', '#000000', '0', '0', $1),
        ('Bottom Text fade in', 'The upcoming event will be delayed by a few minutes.', '(w-text_w)/2', '(h-line_h)*0.9', '24', '4', '#ffffff', '1', '#000000@0x80', '4', 'ifnot(ld(1),st(1,t));if(lt(t,ld(1)+1),0,if(lt(t,ld(1)+2),(t-(ld(1)+1))/1,if(lt(t,ld(1)+8),1,if(lt(t,ld(1)+9),(1-(t-(ld(1)+8)))/1,0))))', $1),
        ('Scrolling Text', 'We have a very important announcement to make.', 'ifnot(ld(1),st(1,t));if(lt(t,ld(1)+1),w+4,w-w/12*mod(t-ld(1),12*(w+tw)/w))', '(h-line_h)*0.9', '24', '4', '#ffffff', '1', '#000000@0x80', '4', '1.0', $1);";

    sqlx::query(query).bind(channel_id).execute(conn).await
}
|
|
||||||
|
|
||||||
pub async fn delete_preset(
|
|
||||||
conn: &Pool<Sqlite>,
|
|
||||||
id: &i32,
|
|
||||||
) -> Result<SqliteQueryResult, sqlx::Error> {
|
|
||||||
let query = "DELETE FROM presets WHERE id = $1;";
|
|
||||||
|
|
||||||
sqlx::query(query).bind(id).execute(conn).await
|
|
||||||
}
|
|
@ -1,40 +0,0 @@
|
|||||||
use std::io::{stdin, stdout, Write};
|
|
||||||
|
|
||||||
use sqlx::{migrate::MigrateDatabase, Pool, Sqlite, SqlitePool};
|
|
||||||
|
|
||||||
pub mod handles;
|
|
||||||
pub mod models;
|
|
||||||
|
|
||||||
use crate::utils::db_path;
|
|
||||||
|
|
||||||
pub async fn db_pool() -> Result<Pool<Sqlite>, sqlx::Error> {
|
|
||||||
let db_path = db_path().unwrap();
|
|
||||||
|
|
||||||
if !Sqlite::database_exists(db_path).await.unwrap_or(false) {
|
|
||||||
Sqlite::create_database(db_path).await.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
let conn = SqlitePool::connect(db_path).await?;
|
|
||||||
|
|
||||||
Ok(conn)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn db_drop() {
|
|
||||||
let mut drop_answer = String::new();
|
|
||||||
|
|
||||||
print!("Drop Database [Y/n]: ");
|
|
||||||
stdout().flush().unwrap();
|
|
||||||
|
|
||||||
stdin()
|
|
||||||
.read_line(&mut drop_answer)
|
|
||||||
.expect("Did not enter a yes or no?");
|
|
||||||
|
|
||||||
let drop = drop_answer.trim().to_lowercase().starts_with('y');
|
|
||||||
|
|
||||||
if drop {
|
|
||||||
match Sqlite::drop_database(db_path().unwrap()).await {
|
|
||||||
Ok(_) => println!("Successfully dropped DB"),
|
|
||||||
Err(e) => eprintln!("{e}"),
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
@ -1,434 +0,0 @@
|
|||||||
use std::{error::Error, fmt, str::FromStr};
|
|
||||||
|
|
||||||
use once_cell::sync::OnceCell;
|
|
||||||
use regex::Regex;
|
|
||||||
use serde::{
|
|
||||||
de::{self, Visitor},
|
|
||||||
Deserialize, Serialize,
|
|
||||||
};
|
|
||||||
// use serde_with::{formats::CommaSeparator, serde_as, StringWithSeparator};
|
|
||||||
use sqlx::{sqlite::SqliteRow, FromRow, Pool, Row, Sqlite};
|
|
||||||
|
|
||||||
use crate::db::handles;
|
|
||||||
use crate::utils::config::PlayoutConfig;
|
|
||||||
|
|
||||||
/// Mirror of the singleton `global` table row: instance-wide paths,
/// the auth secret and SMTP mail settings.
#[derive(Clone, Default, Debug, Deserialize, Serialize, sqlx::FromRow)]
pub struct GlobalSettings {
    pub id: i32,
    // Random secret generated at first start; `None` before init.
    pub secret: Option<String>,
    pub logs: String,
    pub playlists: String,
    pub public: String,
    pub storage: String,
    // True when running inside a container (shared storage layout).
    pub shared: bool,
    pub mail_smtp: String,
    pub mail_user: String,
    pub mail_password: String,
    pub mail_starttls: bool,
}
|
|
||||||
|
|
||||||
impl GlobalSettings {
|
|
||||||
pub async fn new(conn: &Pool<Sqlite>) -> Self {
|
|
||||||
let global_settings = handles::select_global(conn);
|
|
||||||
|
|
||||||
match global_settings.await {
|
|
||||||
Ok(g) => g,
|
|
||||||
Err(_) => GlobalSettings {
|
|
||||||
id: 0,
|
|
||||||
secret: None,
|
|
||||||
logs: String::new(),
|
|
||||||
playlists: String::new(),
|
|
||||||
public: String::new(),
|
|
||||||
storage: String::new(),
|
|
||||||
shared: false,
|
|
||||||
mail_smtp: String::new(),
|
|
||||||
mail_user: String::new(),
|
|
||||||
mail_password: String::new(),
|
|
||||||
mail_starttls: false,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn global() -> &'static GlobalSettings {
|
|
||||||
INSTANCE.get().expect("Config is not initialized")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static INSTANCE: OnceCell<GlobalSettings> = OnceCell::new();
|
|
||||||
|
|
||||||
/// Load the global settings once and publish them in the process-wide
/// `INSTANCE`; panics if called a second time (`OnceCell::set`).
pub async fn init_globales(conn: &Pool<Sqlite>) {
    let config = GlobalSettings::new(conn).await;
    INSTANCE.set(config).unwrap();
}
|
|
||||||
|
|
||||||
/// Mirror of a `channels` table row plus the transient `utc_offset`.
#[derive(Clone, Debug, Default, Deserialize, Serialize, sqlx::FromRow)]
pub struct Channel {
    // Incoming JSON never sets the id; it defaults to 1.
    #[serde(default = "default_id", skip_deserializing)]
    pub id: i32,
    pub name: String,
    pub preview_url: String,
    pub extra_extensions: String,
    pub active: bool,
    pub public: String,
    pub playlists: String,
    pub storage: String,
    pub last_date: Option<String>,
    pub time_shift: f64,

    // Not a table column: filled in by the select helpers with the
    // server's current UTC offset.
    #[sqlx(default)]
    #[serde(default)]
    pub utc_offset: i32,
}
|
|
||||||
|
|
||||||
/// Serde default for `Channel::id`: deserialized channels belong to
/// channel 1 unless set explicitly elsewhere.
fn default_id() -> i32 {
    1
}
|
|
||||||
|
|
||||||
// #[serde_as]
|
|
||||||
// #[serde_as]
/// Mirror of a `user` table row; the password hash is never serialized
/// and the token only exists in API responses.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct User {
    #[serde(skip_deserializing)]
    pub id: i32,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub mail: Option<String>,
    pub username: String,
    // Plain on input, Argon2 hash once stored; never sent back out.
    #[serde(skip_serializing, default = "empty_string")]
    pub password: String,
    pub role_id: Option<i32>,
    // #[serde_as(as = "StringWithSeparator::<CommaSeparator, i32>")]
    pub channel_ids: Option<Vec<i32>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub token: Option<String>,
}
|
|
||||||
|
|
||||||
impl FromRow<'_, SqliteRow> for User {
|
|
||||||
fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
|
|
||||||
Ok(Self {
|
|
||||||
id: row.try_get("id").unwrap_or_default(),
|
|
||||||
mail: row.try_get("mail").unwrap_or_default(),
|
|
||||||
username: row.try_get("username").unwrap_or_default(),
|
|
||||||
password: row.try_get("password").unwrap_or_default(),
|
|
||||||
role_id: row.try_get("role_id").unwrap_or_default(),
|
|
||||||
channel_ids: Some(
|
|
||||||
row.try_get::<String, &str>("channel_ids")
|
|
||||||
.unwrap_or_default()
|
|
||||||
.split(',')
|
|
||||||
.map(|i| i.parse::<i32>().unwrap_or_default())
|
|
||||||
.collect(),
|
|
||||||
),
|
|
||||||
token: None,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Serde default for `User::password`: an empty string.
fn empty_string() -> String {
    String::new()
}
|
|
||||||
|
|
||||||
/// Minimal user identity carried in auth tokens/session state:
/// the user id plus the channel ids the user may access.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct UserMeta {
    pub id: i32,
    pub channels: Vec<i32>,
}
|
|
||||||
|
|
||||||
impl UserMeta {
    /// Bundle a user id with its accessible channel ids.
    pub fn new(id: i32, channels: Vec<i32>) -> Self {
        Self { id, channels }
    }
}
|
|
||||||
|
|
||||||
/// Access level of a user, ordered from most to least privileged.
/// Stored in the DB as its snake_case name (see `Display`/`FromStr`).
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub enum Role {
    GlobalAdmin,
    ChannelAdmin,
    User,
    Guest,
}
|
|
||||||
|
|
||||||
impl Role {
|
|
||||||
pub fn set_role(role: &str) -> Self {
|
|
||||||
match role {
|
|
||||||
"global_admin" => Role::GlobalAdmin,
|
|
||||||
"channel_admin" => Role::ChannelAdmin,
|
|
||||||
"user" => Role::User,
|
|
||||||
_ => Role::Guest,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromStr for Role {
    type Err = String;

    /// Parse a snake_case role name; the conversion never fails —
    /// every unknown name maps to `Guest`.
    fn from_str(input: &str) -> Result<Self, Self::Err> {
        match input {
            "global_admin" => Ok(Self::GlobalAdmin),
            "channel_admin" => Ok(Self::ChannelAdmin),
            "user" => Ok(Self::User),
            _ => Ok(Self::Guest),
        }
    }
}
|
|
||||||
|
|
||||||
impl fmt::Display for Role {
    /// Render the role with the same snake_case names used in the
    /// database and accepted by `FromStr`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match self {
            Self::GlobalAdmin => "global_admin",
            Self::ChannelAdmin => "channel_admin",
            Self::User => "user",
            Self::Guest => "guest",
        };

        write!(f, "{name}")
    }
}
|
|
||||||
|
|
||||||
/// Decode a `Role` directly from a SQLite value.
///
/// The raw column is decoded as `&str` first and then parsed via
/// `FromStr`; since parsing never fails (unknown names map to
/// `Guest`), decoding can only error on the underlying string decode.
impl<'r> sqlx::decode::Decode<'r, ::sqlx::Sqlite> for Role
where
    &'r str: sqlx::decode::Decode<'r, sqlx::Sqlite>,
{
    fn decode(
        value: sqlx::sqlite::SqliteValueRef<'r>,
    ) -> Result<Role, Box<dyn Error + 'static + Send + Sync>> {
        let value = <&str as sqlx::decode::Decode<sqlx::Sqlite>>::decode(value)?;

        Ok(value.parse()?)
    }
}
|
|
||||||
|
|
||||||
impl FromRow<'_, SqliteRow> for Role {
|
|
||||||
fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
|
|
||||||
match row.get("name") {
|
|
||||||
"global_admin" => Ok(Self::GlobalAdmin),
|
|
||||||
"channel_admin" => Ok(Self::ChannelAdmin),
|
|
||||||
"user" => Ok(Self::User),
|
|
||||||
_ => Ok(Self::Guest),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Preset of ffmpeg `drawtext` filter parameters, stored per channel.
///
/// Numeric-looking fields stay `String` because they may contain
/// drawtext expressions; `deserialize_number_or_string` lets clients
/// send them either as JSON numbers or strings.
#[derive(Debug, Deserialize, Serialize, Clone, sqlx::FromRow)]
pub struct TextPreset {
    // Assigned by the database; never taken from client input.
    #[sqlx(default)]
    #[serde(skip_deserializing)]
    pub id: i32,
    pub channel_id: i32,
    pub name: String,
    pub text: String,
    pub x: String,
    pub y: String,
    #[serde(deserialize_with = "deserialize_number_or_string")]
    pub fontsize: String,
    #[serde(deserialize_with = "deserialize_number_or_string")]
    pub line_spacing: String,
    pub fontcolor: String,
    // `box` is a Rust keyword, hence the raw identifier.
    pub r#box: String,
    pub boxcolor: String,
    #[serde(deserialize_with = "deserialize_number_or_string")]
    pub boxborderw: String,
    #[serde(deserialize_with = "deserialize_number_or_string")]
    pub alpha: String,
}
|
|
||||||
|
|
||||||
/// Deserialize number or string
|
|
||||||
pub fn deserialize_number_or_string<'de, D>(deserializer: D) -> Result<String, D::Error>
|
|
||||||
where
|
|
||||||
D: serde::Deserializer<'de>,
|
|
||||||
{
|
|
||||||
struct StringOrNumberVisitor;
|
|
||||||
|
|
||||||
impl<'de> Visitor<'de> for StringOrNumberVisitor {
|
|
||||||
type Value = String;
|
|
||||||
|
|
||||||
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
|
|
||||||
formatter.write_str("a string or a number")
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_str<E: de::Error>(self, value: &str) -> Result<Self::Value, E> {
|
|
||||||
let re = Regex::new(r"0,([0-9]+)").unwrap();
|
|
||||||
let clean_string = re.replace_all(value, "0.$1").to_string();
|
|
||||||
Ok(clean_string)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_u64<E: de::Error>(self, value: u64) -> Result<Self::Value, E> {
|
|
||||||
Ok(value.to_string())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_i64<E: de::Error>(self, value: i64) -> Result<Self::Value, E> {
|
|
||||||
Ok(value.to_string())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_f64<E: de::Error>(self, value: f64) -> Result<Self::Value, E> {
|
|
||||||
Ok(value.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
deserializer.deserialize_any(StringOrNumberVisitor)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Flattened per-channel playout configuration as stored in the
/// database (one column per setting, prefixed by its section name).
#[derive(Clone, Debug, Deserialize, Serialize, sqlx::FromRow)]
pub struct Configuration {
    pub id: i32,
    pub channel_id: i32,
    pub general_stop_threshold: f64,

    // Mail / alerting settings.
    pub mail_subject: String,
    pub mail_recipient: String,
    pub mail_level: String,
    pub mail_interval: i64,

    // Logging settings.
    pub logging_ffmpeg_level: String,
    pub logging_ingest_level: String,
    pub logging_detect_silence: bool,
    #[serde(default)]
    pub logging_ignore: String,

    // ffmpeg processing settings.
    pub processing_mode: String,
    pub processing_audio_only: bool,
    pub processing_copy_audio: bool,
    pub processing_copy_video: bool,
    pub processing_width: i64,
    pub processing_height: i64,
    pub processing_aspect: f64,
    pub processing_fps: f64,
    pub processing_add_logo: bool,
    pub processing_logo: String,
    pub processing_logo_scale: String,
    pub processing_logo_opacity: f64,
    pub processing_logo_position: String,
    #[serde(default = "default_tracks")]
    pub processing_audio_tracks: i32,
    #[serde(default = "default_track_index")]
    pub processing_audio_track_index: i32,
    #[serde(default = "default_channels")]
    pub processing_audio_channels: u8,
    pub processing_volume: f64,
    #[serde(default)]
    pub processing_filter: String,
    #[serde(default)]
    pub processing_vtt_enable: bool,
    #[serde(default)]
    pub processing_vtt_dummy: Option<String>,

    // Live ingest settings.
    pub ingest_enable: bool,
    pub ingest_param: String,
    #[serde(default)]
    pub ingest_filter: String,

    // Playlist settings.
    pub playlist_day_start: String,
    pub playlist_length: String,
    pub playlist_infinit: bool,

    // Storage / media library settings.
    pub storage_filler: String,
    pub storage_extensions: String,
    pub storage_shuffle: bool,

    // drawtext overlay settings.
    pub text_add: bool,
    pub text_from_filename: bool,
    pub text_font: String,
    pub text_style: String,
    pub text_regex: String,

    // Post-processing task hook.
    pub task_enable: bool,
    pub task_path: String,

    // Output settings.
    pub output_mode: String,
    pub output_param: String,
}
|
|
||||||
|
|
||||||
impl Configuration {
    /// Flatten a structured `PlayoutConfig` into its database row
    /// representation for the given channel.
    ///
    /// List-like values (`ignore_lines`, `extensions`) are joined
    /// with `;` for storage; enum-like values are stringified.
    /// NOTE(review): this is an ad-hoc constructor, not the `From`
    /// trait — it takes three arguments, so the name is conventional
    /// only.
    pub fn from(id: i32, channel_id: i32, config: PlayoutConfig) -> Self {
        Self {
            id,
            channel_id,
            general_stop_threshold: config.general.stop_threshold,
            mail_subject: config.mail.subject,
            mail_recipient: config.mail.recipient,
            mail_level: config.mail.mail_level.to_string(),
            mail_interval: config.mail.interval,
            logging_ffmpeg_level: config.logging.ffmpeg_level,
            logging_ingest_level: config.logging.ingest_level,
            logging_detect_silence: config.logging.detect_silence,
            logging_ignore: config.logging.ignore_lines.join(";"),
            processing_mode: config.processing.mode.to_string(),
            processing_audio_only: config.processing.audio_only,
            processing_audio_track_index: config.processing.audio_track_index,
            processing_copy_audio: config.processing.copy_audio,
            processing_copy_video: config.processing.copy_video,
            processing_width: config.processing.width,
            processing_height: config.processing.height,
            processing_aspect: config.processing.aspect,
            processing_fps: config.processing.fps,
            processing_add_logo: config.processing.add_logo,
            processing_logo: config.processing.logo,
            processing_logo_scale: config.processing.logo_scale,
            processing_logo_opacity: config.processing.logo_opacity,
            processing_logo_position: config.processing.logo_position,
            processing_audio_tracks: config.processing.audio_tracks,
            processing_audio_channels: config.processing.audio_channels,
            processing_volume: config.processing.volume,
            processing_filter: config.processing.custom_filter,
            processing_vtt_enable: config.processing.vtt_enable,
            processing_vtt_dummy: config.processing.vtt_dummy,
            ingest_enable: config.ingest.enable,
            ingest_param: config.ingest.input_param,
            ingest_filter: config.ingest.custom_filter,
            playlist_day_start: config.playlist.day_start,
            playlist_length: config.playlist.length,
            playlist_infinit: config.playlist.infinit,
            storage_filler: config.storage.filler,
            storage_extensions: config.storage.extensions.join(";"),
            storage_shuffle: config.storage.shuffle,
            text_add: config.text.add_text,
            text_font: config.text.font,
            text_from_filename: config.text.text_from_filename,
            text_style: config.text.style,
            text_regex: config.text.regex,
            task_enable: config.task.enable,
            task_path: config.task.path.to_string_lossy().to_string(),
            output_mode: config.output.mode.to_string(),
            output_param: config.output.output_param,
        }
    }
}
|
|
||||||
|
|
||||||
/// Serde default for `processing_audio_track_index`.
fn default_track_index() -> i32 {
    -1
}
|
|
||||||
|
|
||||||
/// Serde default for `processing_audio_tracks`.
fn default_tracks() -> i32 {
    1
}
|
|
||||||
|
|
||||||
/// Serde default for `processing_audio_channels`.
fn default_channels() -> u8 {
    2
}
|
|
||||||
|
|
||||||
/// Optional per-channel overrides for individual ffmpeg parameters
/// and filter snippets; `None` means "use the built-in default".
#[derive(Clone, Debug, Deserialize, Serialize, sqlx::FromRow)]
pub struct AdvancedConfiguration {
    pub id: i32,
    pub channel_id: i32,
    // Raw ffmpeg parameter overrides.
    pub decoder_input_param: Option<String>,
    pub decoder_output_param: Option<String>,
    pub encoder_input_param: Option<String>,
    pub ingest_input_param: Option<String>,
    // Video filter overrides.
    pub filter_deinterlace: Option<String>,
    pub filter_pad_scale_w: Option<String>,
    pub filter_pad_scale_h: Option<String>,
    pub filter_pad_video: Option<String>,
    pub filter_fps: Option<String>,
    pub filter_scale: Option<String>,
    pub filter_set_dar: Option<String>,
    pub filter_fade_in: Option<String>,
    pub filter_fade_out: Option<String>,
    pub filter_overlay_logo_scale: Option<String>,
    pub filter_overlay_logo_fade_in: Option<String>,
    pub filter_overlay_logo_fade_out: Option<String>,
    pub filter_overlay_logo: Option<String>,
    pub filter_tpad: Option<String>,
    pub filter_drawtext_from_file: Option<String>,
    pub filter_drawtext_from_zmq: Option<String>,
    // Audio filter overrides.
    pub filter_aevalsrc: Option<String>,
    pub filter_afade_in: Option<String>,
    pub filter_afade_out: Option<String>,
    pub filter_apad: Option<String>,
    pub filter_volume: Option<String>,
    pub filter_split: Option<String>,
}
|
|
@ -1,47 +0,0 @@
|
|||||||
use std::sync::{Arc, Mutex};
|
|
||||||
|
|
||||||
use actix_web::{dev::ServiceRequest, Error, HttpMessage};
|
|
||||||
use actix_web_grants::authorities::AttachAuthorities;
|
|
||||||
use actix_web_httpauth::extractors::bearer::BearerAuth;
|
|
||||||
use clap::Parser;
|
|
||||||
use lazy_static::lazy_static;
|
|
||||||
use sysinfo::{Disks, Networks, System};
|
|
||||||
|
|
||||||
pub mod api;
|
|
||||||
pub mod db;
|
|
||||||
pub mod macros;
|
|
||||||
pub mod player;
|
|
||||||
pub mod sse;
|
|
||||||
pub mod utils;
|
|
||||||
|
|
||||||
use api::auth;
|
|
||||||
use db::models::UserMeta;
|
|
||||||
use utils::advanced_config::AdvancedConfig;
|
|
||||||
use utils::args_parse::Args;
|
|
||||||
|
|
||||||
lazy_static! {
    /// Parsed command line arguments (evaluated once on first access).
    pub static ref ARGS: Args = Args::parse();
    /// Shared, refreshed list of the system's disks.
    pub static ref DISKS: Arc<Mutex<Disks>> =
        Arc::new(Mutex::new(Disks::new_with_refreshed_list()));
    /// Shared, refreshed list of the network interfaces.
    pub static ref NETWORKS: Arc<Mutex<Networks>> =
        Arc::new(Mutex::new(Networks::new_with_refreshed_list()));
    /// Shared system information handle (CPU, memory, ...).
    pub static ref SYS: Arc<Mutex<System>> = Arc::new(Mutex::new(System::new_all()));
}
|
|
||||||
|
|
||||||
/// Bearer-token middleware hook: decode the JWT and attach the
/// caller's role (for actix-web-grants checks) plus a `UserMeta`
/// (user id and allowed channel ids) to the request.
pub async fn validator(
    req: ServiceRequest,
    credentials: BearerAuth,
) -> Result<ServiceRequest, (Error, ServiceRequest)> {
    // We just get permissions from JWT
    match auth::decode_jwt(credentials.token()).await {
        Ok(claims) => {
            // The role drives the authorization guards on the routes.
            req.attach(vec![claims.role]);

            req.extensions_mut()
                .insert(UserMeta::new(claims.id, claims.channels));

            Ok(req)
        }
        // Hand the request back so actix can build the error response.
        Err(e) => Err((e, req)),
    }
}
|
|
@ -1,286 +0,0 @@
|
|||||||
use std::{
|
|
||||||
collections::HashSet,
|
|
||||||
fs::File,
|
|
||||||
io,
|
|
||||||
process::exit,
|
|
||||||
sync::{atomic::AtomicBool, Arc, Mutex},
|
|
||||||
thread,
|
|
||||||
};
|
|
||||||
|
|
||||||
use actix_web::{middleware::Logger, web, App, HttpServer};
|
|
||||||
use actix_web_httpauth::middleware::HttpAuthentication;
|
|
||||||
|
|
||||||
#[cfg(any(debug_assertions, not(feature = "embed_frontend")))]
|
|
||||||
use actix_files::Files;
|
|
||||||
|
|
||||||
#[cfg(all(not(debug_assertions), feature = "embed_frontend"))]
|
|
||||||
use actix_web_static_files::ResourceFiles;
|
|
||||||
|
|
||||||
use log::*;
|
|
||||||
|
|
||||||
use ffplayout::{
|
|
||||||
api::routes::*,
|
|
||||||
db::{db_drop, db_pool, handles, models::init_globales},
|
|
||||||
player::{
|
|
||||||
controller::{ChannelController, ChannelManager},
|
|
||||||
utils::{get_date, is_remote, json_validate::validate_playlist, JsonPlaylist},
|
|
||||||
},
|
|
||||||
sse::{broadcast::Broadcaster, routes::*, SseAuthState},
|
|
||||||
utils::{
|
|
||||||
args_parse::run_args,
|
|
||||||
config::get_config,
|
|
||||||
logging::{init_logging, MailQueue},
|
|
||||||
playlist::generate_playlist,
|
|
||||||
},
|
|
||||||
validator, ARGS,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[cfg(any(debug_assertions, not(feature = "embed_frontend")))]
|
|
||||||
use ffplayout::utils::public_path;
|
|
||||||
|
|
||||||
#[cfg(all(not(debug_assertions), feature = "embed_frontend"))]
|
|
||||||
include!(concat!(env!("OUT_DIR"), "/generated.rs"));
|
|
||||||
|
|
||||||
/// Number of HTTP worker threads: half the available hardware
/// parallelism, but never fewer than two.
fn thread_counter() -> usize {
    let available_threads = thread::available_parallelism().map_or(1, |n| n.get());

    (available_threads / 2).max(2)
}
|
|
||||||
|
|
||||||
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // Per-channel mail queues, shared with the logger so alerts can be
    // collected and sent in batches.
    let mail_queues = Arc::new(Mutex::new(vec![]));

    let pool = db_pool()
        .await
        .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;

    // One-shot CLI actions (init, user management, ...) exit directly
    // with their own status code.
    if let Err(c) = run_args(&pool).await {
        exit(c);
    }

    init_globales(&pool).await;
    init_logging(mail_queues.clone())?;

    let channel_controllers = Arc::new(Mutex::new(ChannelController::new()));

    if let Some(conn) = &ARGS.listen {
        // Server mode: start every configured channel, then run the HTTP API.
        let channels = handles::select_related_channels(&pool, None)
            .await
            .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;

        for channel in channels.iter() {
            let config = get_config(&pool, channel.id)
                .await
                .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
            let manager = ChannelManager::new(Some(pool.clone()), channel.clone(), config.clone());
            let m_queue = Arc::new(Mutex::new(MailQueue::new(channel.id, config.mail)));

            channel_controllers
                .lock()
                .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?
                .add(manager.clone());

            if let Ok(mut mqs) = mail_queues.lock() {
                mqs.push(m_queue.clone());
            }

            // Only channels flagged active in the database start playing.
            if channel.active {
                manager.async_start().await;
            }
        }

        // Split "<address>:<port>" from --listen; a missing or invalid
        // port is a hard error.
        let ip_port = conn.split(':').collect::<Vec<&str>>();
        let addr = ip_port[0];
        let port = ip_port
            .get(1)
            .and_then(|p| p.parse::<u16>().ok())
            .ok_or(io::Error::new(
                io::ErrorKind::InvalidInput,
                "<ADRESSE>:<PORT> needed! For example: 127.0.0.1:8787",
            ))?;
        let controllers = web::Data::from(channel_controllers.clone());
        let auth_state = web::Data::new(SseAuthState {
            uuids: tokio::sync::Mutex::new(HashSet::new()),
        });
        let broadcast_data = Broadcaster::create();
        let thread_count = thread_counter();

        info!("Running ffplayout API, listen on http://{conn}");

        let db_clone = pool.clone();

        // no 'allow origin' here, give it to the reverse proxy
        HttpServer::new(move || {
            let queues = mail_queues.clone();

            let auth = HttpAuthentication::bearer(validator);
            let db_pool = web::Data::new(db_clone.clone());
            // Customize logging format to get IP though proxies.
            let logger = Logger::new("%{r}a \"%r\" %s %b \"%{Referer}i\" \"%{User-Agent}i\" %T")
                .exclude_regex(r"/_nuxt/*");

            let mut web_app = App::new()
                .app_data(db_pool)
                .app_data(web::Data::from(queues))
                .app_data(controllers.clone())
                .app_data(auth_state.clone())
                .app_data(web::Data::from(Arc::clone(&broadcast_data)))
                .wrap(logger)
                // Login is the only route outside the auth middleware.
                .service(login)
                .service(
                    web::scope("/api")
                        .wrap(auth.clone())
                        .service(add_user)
                        .service(get_user)
                        .service(get_by_name)
                        .service(get_users)
                        .service(remove_user)
                        .service(get_advanced_config)
                        .service(update_advanced_config)
                        .service(get_playout_config)
                        .service(update_playout_config)
                        .service(add_preset)
                        .service(get_presets)
                        .service(update_preset)
                        .service(delete_preset)
                        .service(get_channel)
                        .service(get_all_channels)
                        .service(patch_channel)
                        .service(add_channel)
                        .service(remove_channel)
                        .service(update_user)
                        .service(send_text_message)
                        .service(control_playout)
                        .service(media_current)
                        .service(process_control)
                        .service(get_playlist)
                        .service(save_playlist)
                        .service(gen_playlist)
                        .service(del_playlist)
                        .service(get_log)
                        .service(file_browser)
                        .service(add_dir)
                        .service(move_rename)
                        .service(remove)
                        .service(save_file)
                        .service(import_playlist)
                        .service(get_program)
                        .service(get_system_stat)
                        .service(generate_uuid),
                )
                // SSE endpoints use a UUID handshake instead of bearer auth.
                .service(
                    web::scope("/data")
                        .service(validate_uuid)
                        .service(event_stream),
                )
                .service(get_file)
                .service(get_public);

            #[cfg(all(not(debug_assertions), feature = "embed_frontend"))]
            {
                // in release mode embed frontend
                let generated = generate();
                web_app =
                    web_app.service(ResourceFiles::new("/", generated).resolve_not_found_to_root());
            }

            #[cfg(any(debug_assertions, not(feature = "embed_frontend")))]
            {
                // in debug mode get frontend from path
                web_app = web_app.service(Files::new("/", public_path()).index_file("index.html"));
            }

            web_app
        })
        .bind((addr, port))?
        .workers(thread_count)
        .run()
        .await?;
    } else if ARGS.drop_db {
        db_drop().await;
    } else {
        // CLI mode without the HTTP server: foreground playout,
        // playlist generation or playlist validation.
        let channels = ARGS.channels.clone().unwrap_or_else(|| vec![1]);

        for (index, channel_id) in channels.iter().enumerate() {
            let config = match get_config(&pool, *channel_id).await {
                Ok(c) => c,
                Err(e) => {
                    eprint!("No config found, channel may not exists!\nOriginal error message: ");
                    return Err(io::Error::new(io::ErrorKind::Other, e.to_string()));
                }
            };
            let channel = handles::select_channel(&pool, channel_id)
                .await
                .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
            let manager = ChannelManager::new(Some(pool.clone()), channel.clone(), config.clone());

            if ARGS.foreground {
                if ARGS.channels.is_none() {
                    error!(
                        "Foreground mode needs at least 1 channel, run with `--channels (1 2 ...)`"
                    );
                    exit(1);
                }
                let m_queue = Arc::new(Mutex::new(MailQueue::new(*channel_id, config.mail)));

                channel_controllers
                    .lock()
                    .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?
                    .add(manager.clone());

                if let Ok(mut mqs) = mail_queues.lock() {
                    mqs.push(m_queue.clone());
                }

                manager.foreground_start(index).await;
            } else if ARGS.generate.is_some() {
                // run a simple playlist generator and save them to disk
                if let Err(e) = generate_playlist(manager) {
                    error!("{e}");
                    exit(1);
                };
            } else if ARGS.validate {
                let mut playlist_path = config.channel.playlists.clone();
                let start_sec = config.playlist.start_sec.unwrap();
                let date = get_date(false, start_sec, false);

                // Playlists live under <root>/<year>/<month>/<date>.json
                // when a directory (or remote root) is configured.
                if playlist_path.is_dir() || is_remote(&playlist_path.to_string_lossy()) {
                    let d: Vec<&str> = date.split('-').collect();
                    playlist_path = playlist_path
                        .join(d[0])
                        .join(d[1])
                        .join(date.clone())
                        .with_extension("json");
                }

                let f = File::options()
                    .read(true)
                    .write(false)
                    .open(&playlist_path)?;

                let playlist: JsonPlaylist = serde_json::from_reader(f)?;

                validate_playlist(
                    config,
                    Arc::new(Mutex::new(Vec::new())),
                    playlist,
                    Arc::new(AtomicBool::new(false)),
                );
            } else if !ARGS.init {
                error!("Run ffplayout with parameters! Run ffplayout -h for more information.");
            }
        }
    }

    // Shutdown: deactivate every channel and stop its child processes
    // before closing the database pool.
    for channel_ctl in &channel_controllers.lock().unwrap().channels {
        channel_ctl.channel.lock().unwrap().active = false;
        channel_ctl.stop_all();
    }

    pool.close().await;

    Ok(())
}
|
|
@ -1,431 +0,0 @@
|
|||||||
use std::{
|
|
||||||
fmt, fs,
|
|
||||||
io::{self, Read},
|
|
||||||
path::Path,
|
|
||||||
process::Child,
|
|
||||||
sync::{
|
|
||||||
atomic::{AtomicBool, AtomicUsize, Ordering},
|
|
||||||
Arc, Mutex,
|
|
||||||
},
|
|
||||||
thread,
|
|
||||||
time::Duration,
|
|
||||||
};
|
|
||||||
|
|
||||||
use actix_web::web;
|
|
||||||
use log::*;
|
|
||||||
use m3u8_rs::Playlist;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use sqlx::{Pool, Sqlite};
|
|
||||||
use walkdir::WalkDir;
|
|
||||||
|
|
||||||
use crate::player::{
|
|
||||||
output::{player, write_hls},
|
|
||||||
utils::{folder::fill_filler_list, Media},
|
|
||||||
};
|
|
||||||
use crate::utils::{
|
|
||||||
config::{OutputMode::*, PlayoutConfig},
|
|
||||||
errors::{ProcessError, ServiceError},
|
|
||||||
};
|
|
||||||
use crate::ARGS;
|
|
||||||
use crate::{
|
|
||||||
db::{handles, models::Channel},
|
|
||||||
utils::logging::Target,
|
|
||||||
};
|
|
||||||
|
|
||||||
/// Crate version, taken from Cargo.toml at compile time.
const VERSION: &str = env!("CARGO_PKG_VERSION");
|
|
||||||
|
|
||||||
/// Defined process units.
///
/// Identifies which ffmpeg child process an operation targets.
#[derive(Clone, Debug, Default, Copy, Eq, Serialize, Deserialize, PartialEq)]
pub enum ProcessUnit {
    /// Reads and decodes the playlist sources.
    #[default]
    Decoder,
    /// Encodes the final output stream.
    Encoder,
    /// Handles incoming live streams.
    Ingest,
}
|
|
||||||
|
|
||||||
impl fmt::Display for ProcessUnit {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
match *self {
|
|
||||||
ProcessUnit::Decoder => write!(f, "Decoder"),
|
|
||||||
ProcessUnit::Encoder => write!(f, "Encoder"),
|
|
||||||
ProcessUnit::Ingest => write!(f, "Ingest"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
use ProcessUnit::*;
|
|
||||||
|
|
||||||
/// Runtime state of one playout channel: its configuration, the
/// ffmpeg child process handles and all flags the playout loop shares
/// between threads. Cloning is cheap — fields are `Arc`s.
#[derive(Clone, Debug, Default)]
pub struct ChannelManager {
    pub db_pool: Option<Pool<Sqlite>>,
    pub config: Arc<Mutex<PlayoutConfig>>,
    pub channel: Arc<Mutex<Channel>>,
    // ffmpeg child process handles (None while not running).
    pub decoder: Arc<Mutex<Option<Child>>>,
    pub encoder: Arc<Mutex<Option<Child>>>,
    pub ingest: Arc<Mutex<Option<Child>>>,
    // Lifecycle flags shared across threads.
    pub ingest_is_running: Arc<AtomicBool>,
    pub is_terminated: Arc<AtomicBool>,
    pub is_alive: Arc<AtomicBool>,
    pub is_processing: Arc<AtomicBool>,
    pub filter_chain: Option<Arc<Mutex<Vec<String>>>>,
    pub current_date: Arc<Mutex<String>>,
    pub list_init: Arc<AtomicBool>,
    // Playlist playback state.
    pub current_media: Arc<Mutex<Option<Media>>>,
    pub current_list: Arc<Mutex<Vec<Media>>>,
    pub filler_list: Arc<Mutex<Vec<Media>>>,
    pub current_index: Arc<AtomicUsize>,
    pub filler_index: Arc<AtomicUsize>,
    // Number of currently running playout loops for this manager.
    pub run_count: Arc<AtomicUsize>,
}
|
|
||||||
|
|
||||||
impl ChannelManager {
|
|
||||||
    /// Create a manager for one channel.
    ///
    /// All flags start in the "not running" state; fields not listed
    /// here (process handles, `is_terminated`, ...) come from `Default`.
    pub fn new(db_pool: Option<Pool<Sqlite>>, channel: Channel, config: PlayoutConfig) -> Self {
        Self {
            db_pool,
            is_alive: Arc::new(AtomicBool::new(false)),
            channel: Arc::new(Mutex::new(channel)),
            config: Arc::new(Mutex::new(config)),
            list_init: Arc::new(AtomicBool::new(true)),
            current_media: Arc::new(Mutex::new(None)),
            // Seed with one empty media entry so index access is safe.
            current_list: Arc::new(Mutex::new(vec![Media::new(0, "", false)])),
            filler_list: Arc::new(Mutex::new(vec![])),
            current_index: Arc::new(AtomicUsize::new(0)),
            filler_index: Arc::new(AtomicUsize::new(0)),
            run_count: Arc::new(AtomicUsize::new(0)),
            ..Default::default()
        }
    }
|
|
||||||
|
|
||||||
    /// Copy the user-editable fields from `other` into the managed
    /// channel (id and other immutable fields are left untouched).
    ///
    /// NOTE(review): takes `self` by value — cheap because the manager
    /// is mostly `Arc`s, but `&self` would likely express the intent
    /// better; confirm no caller relies on the move.
    pub fn update_channel(self, other: &Channel) {
        let mut channel = self.channel.lock().unwrap();

        channel.name.clone_from(&other.name);
        channel.preview_url.clone_from(&other.preview_url);
        channel.extra_extensions.clone_from(&other.extra_extensions);
        channel.active.clone_from(&other.active);
        channel.last_date.clone_from(&other.last_date);
        channel.time_shift.clone_from(&other.time_shift);
        channel.utc_offset.clone_from(&other.utc_offset);
    }
|
|
||||||
|
|
||||||
pub fn update_config(&self, new_config: PlayoutConfig) {
|
|
||||||
let mut config = self.config.lock().unwrap();
|
|
||||||
*config = new_config;
|
|
||||||
}
|
|
||||||
|
|
||||||
    /// Start the channel in the background.
    ///
    /// Marks the channel alive, persists the player state, then spawns
    /// a thread that keeps restarting the playout loop until the
    /// channel is deactivated. Does nothing if already running.
    pub async fn async_start(&self) {
        if !self.is_alive.load(Ordering::SeqCst) {
            self.run_count.fetch_add(1, Ordering::SeqCst);
            self.is_alive.store(true, Ordering::SeqCst);
            self.is_terminated.store(false, Ordering::SeqCst);
            self.list_init.store(true, Ordering::SeqCst);

            let pool_clone = self.db_pool.clone().unwrap();
            let self_clone = self.clone();
            let channel_id = self.channel.lock().unwrap().id;

            if let Err(e) = handles::update_player(&pool_clone, channel_id, true).await {
                error!(target: Target::all(), channel = channel_id; "Unable write to player status: {e}");
            };

            thread::spawn(move || {
                let mut run_endless = true;

                while run_endless {
                    let run_count = self_clone.run_count.clone();

                    if let Err(e) = start_channel(self_clone.clone()) {
                        run_count.fetch_sub(1, Ordering::SeqCst);
                        error!("{e}");
                    };

                    let active = self_clone.channel.lock().unwrap().active;

                    if !active {
                        // Channel was switched off: leave the restart loop.
                        run_endless = false;
                    } else {
                        // Playout exited while still active: re-arm the
                        // state flags and restart after a short pause.
                        self_clone.run_count.fetch_add(1, Ordering::SeqCst);
                        self_clone.is_alive.store(true, Ordering::SeqCst);
                        self_clone.is_terminated.store(false, Ordering::SeqCst);
                        self_clone.list_init.store(true, Ordering::SeqCst);

                        thread::sleep(Duration::from_millis(250));
                    }
                }

                trace!("Async start done");
            });
        }
    }
|
|
||||||
|
|
||||||
    /// Start the channel in the foreground (CLI `--foreground` mode).
    ///
    /// The last of the requested channels runs on a blocking task that
    /// is awaited, keeping the process alive; earlier channels run on
    /// detached threads. Does nothing if already running.
    pub async fn foreground_start(&self, index: usize) {
        if !self.is_alive.load(Ordering::SeqCst) {
            self.run_count.fetch_add(1, Ordering::SeqCst);
            self.is_alive.store(true, Ordering::SeqCst);
            self.is_terminated.store(false, Ordering::SeqCst);
            self.list_init.store(true, Ordering::SeqCst);

            let pool_clone = self.db_pool.clone().unwrap();
            let self_clone = self.clone();
            let channel_id = self.channel.lock().unwrap().id;

            if let Err(e) = handles::update_player(&pool_clone, channel_id, true).await {
                error!(target: Target::all(), channel = channel_id; "Unable write to player status: {e}");
            };

            // Last requested channel: block the runtime until playout ends.
            if index + 1 == ARGS.channels.clone().unwrap_or_default().len() {
                let run_count = self_clone.run_count.clone();

                tokio::task::spawn_blocking(move || {
                    if let Err(e) = start_channel(self_clone) {
                        run_count.fetch_sub(1, Ordering::SeqCst);
                        error!("{e}");
                    }
                })
                .await
                .unwrap();
            } else {
                thread::spawn(move || {
                    let run_count = self_clone.run_count.clone();

                    if let Err(e) = start_channel(self_clone) {
                        run_count.fetch_sub(1, Ordering::SeqCst);
                        error!("{e}");
                    };
                });
            }
        }
    }
|
|
||||||
|
|
||||||
pub fn stop(&self, unit: ProcessUnit) -> Result<(), ProcessError> {
|
|
||||||
match unit {
|
|
||||||
Decoder => {
|
|
||||||
if let Some(proc) = self.decoder.lock()?.as_mut() {
|
|
||||||
proc.kill()
|
|
||||||
.map_err(|e| ProcessError::Custom(format!("Decoder: {e}")))?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Encoder => {
|
|
||||||
if let Some(proc) = self.encoder.lock()?.as_mut() {
|
|
||||||
proc.kill()
|
|
||||||
.map_err(|e| ProcessError::Custom(format!("Encoder: {e}")))?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ingest => {
|
|
||||||
if let Some(proc) = self.ingest.lock()?.as_mut() {
|
|
||||||
proc.kill()
|
|
||||||
.map_err(|e| ProcessError::Custom(format!("Ingest: {e}")))?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
self.wait(unit)?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run_wait(
|
|
||||||
&self,
|
|
||||||
unit: ProcessUnit,
|
|
||||||
child: &Arc<Mutex<Option<Child>>>,
|
|
||||||
) -> Result<(), ProcessError> {
|
|
||||||
if let Some(proc) = child.lock().unwrap().as_mut() {
|
|
||||||
loop {
|
|
||||||
match proc.try_wait() {
|
|
||||||
Ok(Some(_)) => break,
|
|
||||||
Ok(None) => thread::sleep(Duration::from_millis(10)),
|
|
||||||
Err(e) => return Err(ProcessError::Custom(format!("{unit}: {e}"))),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Wait for process to proper close.
/// This prevents orphaned/zombi processes in system
///
/// Dispatches to [`Self::run_wait`] for the matching child slot, then
/// sleeps briefly as a grace period after the process has been reaped.
pub fn wait(&self, unit: ProcessUnit) -> Result<(), ProcessError> {
    match unit {
        Decoder => self.run_wait(unit, &self.decoder)?,
        Encoder => self.run_wait(unit, &self.encoder)?,
        Ingest => self.run_wait(unit, &self.ingest)?,
    }

    // Short grace period after reaping the child.
    thread::sleep(Duration::from_millis(50));

    Ok(())
}
|
|
||||||
|
|
||||||
/// Deactivate the channel and stop all of its child processes.
///
/// Flips the shutdown flags, persists the new player state to the
/// database, then stops Decoder, Encoder and Ingest one after another
/// on a blocking worker thread.
///
/// # Errors
///
/// Returns a [`ServiceError`] when the blocking stop task itself fails;
/// individual process-stop errors are only logged.
pub async fn async_stop(&self) -> Result<(), ServiceError> {
    let channel_id = self.channel.lock().unwrap().id;

    if self.is_alive.load(Ordering::SeqCst) {
        debug!(target: Target::all(), channel = channel_id; "Deactivate playout and stop all child processes from channel: <yellow>{channel_id}</>");
    }

    // Flip all state flags before touching the processes so other
    // threads observe the shutdown as early as possible.
    self.is_terminated.store(true, Ordering::SeqCst);
    self.is_alive.store(false, Ordering::SeqCst);
    self.ingest_is_running.store(false, Ordering::SeqCst);
    self.run_count.fetch_sub(1, Ordering::SeqCst);
    // NOTE(review): assumes db_pool is always Some at this point — TODO confirm
    let pool = self.db_pool.clone().unwrap();

    if let Err(e) = handles::update_player(&pool, channel_id, false).await {
        error!(target: Target::all(), channel = channel_id; "Unable write to player status: {e}");
    };

    for unit in [Decoder, Encoder, Ingest] {
        let self_clone = self.clone();

        // `stop` blocks while waiting on the child, so run it on the
        // blocking thread pool instead of the async executor.
        if let Err(e) = web::block(move || self_clone.stop(unit)).await? {
            // An already exited process is expected and not reported.
            if !e.to_string().contains("exited process") {
                error!(target: Target::all(), channel = channel_id; "{e}")
            }
        }
    }

    Ok(())
}
|
|
||||||
|
|
||||||
/// No matter what is running, terminate them all.
///
/// Synchronous counterpart to `async_stop`: flips the shutdown flags,
/// then kills and reaps Decoder, Encoder and Ingest in turn. Errors
/// from already exited processes are ignored.
pub fn stop_all(&self) {
    let channel_id = self.channel.lock().unwrap().id;

    if self.is_alive.load(Ordering::SeqCst) {
        debug!(target: Target::all(), channel = channel_id; "Stop all child processes from channel: <yellow>{channel_id}</>");
    }

    // Signal shutdown first so reader/monitor threads can exit.
    self.is_terminated.store(true, Ordering::SeqCst);
    self.is_alive.store(false, Ordering::SeqCst);
    self.ingest_is_running.store(false, Ordering::SeqCst);
    self.run_count.fetch_sub(1, Ordering::SeqCst);

    for unit in [Decoder, Encoder, Ingest] {
        if let Err(e) = self.stop(unit) {
            // An already exited process is expected and not reported.
            if !e.to_string().contains("exited process") {
                error!(target: Target::all(), channel = channel_id; "{e}")
            }
        }
    }
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Registry of all [`ChannelManager`] instances the service knows about.
#[derive(Clone, Debug, Default)]
pub struct ChannelController {
    // One manager per configured channel.
    pub channels: Vec<ChannelManager>,
}
|
|
||||||
|
|
||||||
impl ChannelController {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self { channels: vec![] }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn add(&mut self, manager: ChannelManager) {
|
|
||||||
self.channels.push(manager);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get(&self, id: i32) -> Option<ChannelManager> {
|
|
||||||
for manager in self.channels.iter() {
|
|
||||||
if manager.channel.lock().unwrap().id == id {
|
|
||||||
return Some(manager.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn remove(&mut self, channel_id: i32) {
|
|
||||||
self.channels.retain(|manager| {
|
|
||||||
let channel = manager.channel.lock().unwrap();
|
|
||||||
channel.id != channel_id
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn run_count(&self) -> usize {
|
|
||||||
self.channels
|
|
||||||
.iter()
|
|
||||||
.filter(|manager| manager.is_alive.load(Ordering::SeqCst))
|
|
||||||
.count()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn start_channel(manager: ChannelManager) -> Result<(), ProcessError> {
|
|
||||||
let config = manager.config.lock()?.clone();
|
|
||||||
let mode = config.output.mode.clone();
|
|
||||||
let filler_list = manager.filler_list.clone();
|
|
||||||
let channel_id = config.general.channel_id;
|
|
||||||
|
|
||||||
drain_hls_path(&config.channel.public)?;
|
|
||||||
|
|
||||||
debug!(target: Target::all(), channel = channel_id; "Start ffplayout v{VERSION}, channel: <yellow>{channel_id}</>");
|
|
||||||
|
|
||||||
// Fill filler list, can also be a single file.
|
|
||||||
thread::spawn(move || {
|
|
||||||
fill_filler_list(&config, Some(filler_list));
|
|
||||||
});
|
|
||||||
|
|
||||||
match mode {
|
|
||||||
// write files/playlist to HLS m3u8 playlist
|
|
||||||
HLS => write_hls(manager),
|
|
||||||
// play on desktop or stream to a remote target
|
|
||||||
_ => player(manager),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn drain_hls_path(path: &Path) -> io::Result<()> {
|
|
||||||
let m3u8_files = find_m3u8_files(path)?;
|
|
||||||
let mut pl_segments = vec![];
|
|
||||||
|
|
||||||
for file in m3u8_files {
|
|
||||||
let mut file = std::fs::File::open(file).unwrap();
|
|
||||||
let mut bytes: Vec<u8> = Vec::new();
|
|
||||||
file.read_to_end(&mut bytes).unwrap();
|
|
||||||
|
|
||||||
if let Ok(Playlist::MediaPlaylist(pl)) = m3u8_rs::parse_playlist_res(&bytes) {
|
|
||||||
for segment in pl.segments {
|
|
||||||
pl_segments.push(segment.uri);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
delete_old_segments(path, &pl_segments)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Recursively searches for all files with the .m3u8 extension in the specified path.
|
|
||||||
fn find_m3u8_files(path: &Path) -> io::Result<Vec<String>> {
|
|
||||||
let mut m3u8_files = Vec::new();
|
|
||||||
|
|
||||||
for entry in WalkDir::new(path)
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|e| e.ok())
|
|
||||||
.filter(|e| e.path().is_file() && e.path().extension().map_or(false, |ext| ext == "m3u8"))
|
|
||||||
{
|
|
||||||
m3u8_files.push(entry.path().to_string_lossy().to_string());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(m3u8_files)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Check if segment is in playlist, if not, delete it.
|
|
||||||
fn delete_old_segments<P: AsRef<Path> + Clone + std::fmt::Debug>(
|
|
||||||
path: P,
|
|
||||||
pl_segments: &[String],
|
|
||||||
) -> io::Result<()> {
|
|
||||||
for entry in WalkDir::new(path)
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|e| e.ok())
|
|
||||||
.filter(|e| {
|
|
||||||
e.path().is_file()
|
|
||||||
&& e.path()
|
|
||||||
.extension()
|
|
||||||
.map_or(false, |ext| ext == "ts" || ext == "vtt")
|
|
||||||
})
|
|
||||||
{
|
|
||||||
let filename = entry.file_name().to_string_lossy().to_string();
|
|
||||||
|
|
||||||
if !pl_segments.contains(&filename) {
|
|
||||||
fs::remove_file(entry.path())?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
@ -1,184 +0,0 @@
|
|||||||
use std::{
|
|
||||||
io::{BufRead, BufReader, Read},
|
|
||||||
process::{ChildStderr, Command, Stdio},
|
|
||||||
sync::{atomic::Ordering, mpsc::SyncSender},
|
|
||||||
thread,
|
|
||||||
};
|
|
||||||
|
|
||||||
use log::*;
|
|
||||||
|
|
||||||
use crate::utils::{
|
|
||||||
config::{PlayoutConfig, FFMPEG_IGNORE_ERRORS, FFMPEG_UNRECOVERABLE_ERRORS},
|
|
||||||
logging::{log_line, Target},
|
|
||||||
};
|
|
||||||
use crate::vec_strings;
|
|
||||||
use crate::{
|
|
||||||
player::{
|
|
||||||
controller::{ChannelManager, ProcessUnit::*},
|
|
||||||
utils::{is_free_tcp_port, valid_stream, Media},
|
|
||||||
},
|
|
||||||
utils::errors::ProcessError,
|
|
||||||
};
|
|
||||||
|
|
||||||
/// Watch the stderr output of the running ffmpeg ingest server.
///
/// Every line is logged at `level` unless it matches one of the
/// built-in or configured ignore patterns. Two line patterns trigger
/// actions:
/// - an unexpected RTMP stream name stops the ingest process,
/// - an unrecoverable ffmpeg error deactivates and stops the channel.
fn server_monitor(
    id: i32,
    level: &str,
    ignore: Vec<String>,
    buffer: BufReader<ChildStderr>,
    channel_mgr: ChannelManager,
) -> Result<(), ProcessError> {
    for line in buffer.lines() {
        let line = line?;

        // Log only lines that are not on any ignore list.
        if !FFMPEG_IGNORE_ERRORS.iter().any(|i| line.contains(*i))
            && !ignore.iter().any(|i| line.contains(i))
        {
            log_line(&line, level);
        }

        // Reject RTMP publishers whose stream is not valid.
        if line.contains("rtmp") && line.contains("Unexpected stream") && !valid_stream(&line) {
            warn!(target: Target::file_mail(), channel = id; "Unexpected ingest stream: {line}");

            if let Err(e) = channel_mgr.stop(Ingest) {
                error!(target: Target::file_mail(), channel = id; "{e}");
            };
        }

        // A fatal ffmpeg error takes the whole channel down.
        if FFMPEG_UNRECOVERABLE_ERRORS
            .iter()
            .any(|i| line.contains(*i))
        {
            error!(target: Target::file_mail(), channel = id; "Hit unrecoverable error!");
            channel_mgr.channel.lock().unwrap().active = false;
            channel_mgr.stop_all();
        }
    }

    Ok(())
}
|
|
||||||
|
|
||||||
/// ffmpeg Ingest Server
///
/// Start ffmpeg in listen mode, and wait for input.
///
/// Builds the listener command from config (optional advanced input
/// args, optional VTT dummy input, filters, processing args), then
/// loops until termination: spawn ffmpeg, read raw output chunks from
/// its stdout and forward them through `ingest_sender`, while a
/// separate thread monitors stderr via `server_monitor`.
pub fn ingest_server(
    config: PlayoutConfig,
    ingest_sender: SyncSender<(usize, [u8; 65088])>,
    channel_mgr: ChannelManager,
) -> Result<(), ProcessError> {
    let id = config.general.channel_id;
    // Fixed-size transfer buffer; its size matches the channel's item type.
    let mut buffer: [u8; 65088] = [0; 65088];
    let mut server_cmd = vec_strings!["-hide_banner", "-nostats", "-v", "level+info"];
    // assumes ingest.input_cmd is always configured when this server runs — TODO confirm
    let stream_input = config.ingest.input_cmd.clone().unwrap();
    let mut dummy_media = Media::new(0, "Live Stream", false);
    dummy_media.unit = Ingest;
    dummy_media.add_filter(&config, &None);
    let is_terminated = channel_mgr.is_terminated.clone();
    let ingest_is_running = channel_mgr.ingest_is_running.clone();
    let vtt_dummy = config
        .channel
        .storage
        .join(config.processing.vtt_dummy.clone().unwrap_or_default());

    // Optional user-supplied input arguments come first.
    if let Some(ingest_input_cmd) = config.advanced.ingest.input_cmd {
        server_cmd.append(&mut ingest_input_cmd.clone());
    }

    server_cmd.append(&mut stream_input.clone());

    // Secondary input carrying a dummy WebVTT track, when enabled.
    if config.processing.vtt_enable && vtt_dummy.is_file() {
        server_cmd.append(&mut vec_strings!["-i", vtt_dummy.to_string_lossy()]);
    }

    if let Some(mut filter) = dummy_media.filter {
        server_cmd.append(&mut filter.cmd());
        server_cmd.append(&mut filter.map());
    }

    if config.processing.vtt_enable && vtt_dummy.is_file() {
        server_cmd.append(&mut vec_strings!("-map", "1:s"));
    }

    if let Some(mut cmd) = config.processing.cmd {
        server_cmd.append(&mut cmd);
    }

    let mut is_running;

    debug!(target: Target::file_mail(), channel = id;
        "Server CMD: <bright-blue>\"ffmpeg {}\"</>",
        server_cmd.join(" ")
    );

    // Refuse to start when the listen port is already taken.
    if let Some(url) = stream_input.iter().find(|s| s.contains("://")) {
        if !is_free_tcp_port(id, url) {
            channel_mgr.channel.lock().unwrap().active = false;
            channel_mgr.stop_all();
        } else {
            info!(target: Target::file_mail(), channel = id; "Start ingest server, listening on: <b><magenta>{url}</></b>");
        }
    };

    // Respawn the listener after each publisher disconnect until the
    // channel is terminated.
    while !is_terminated.load(Ordering::SeqCst) {
        let proc_ctl = channel_mgr.clone();
        let level = config.logging.ingest_level.clone();
        let ignore = config.logging.ignore_lines.clone();
        let mut server_proc = match Command::new("ffmpeg")
            .args(server_cmd.clone())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .spawn()
        {
            Err(e) => {
                error!(target: Target::file_mail(), channel = id; "couldn't spawn ingest server: {e}");
                panic!("couldn't spawn ingest server: {e}")
            }
            Ok(proc) => proc,
        };
        let mut ingest_reader = BufReader::new(server_proc.stdout.take().unwrap());
        let server_err = BufReader::new(server_proc.stderr.take().unwrap());
        // stderr is consumed on its own thread so stdout reads never block.
        let error_reader_thread =
            thread::spawn(move || server_monitor(id, &level, ignore, server_err, proc_ctl));

        *channel_mgr.ingest.lock().unwrap() = Some(server_proc);
        is_running = false;

        loop {
            let bytes_len = match ingest_reader.read(&mut buffer[..]) {
                Ok(length) => length,
                Err(e) => {
                    debug!(target: Target::file_mail(), channel = id; "Ingest server read {e:?}");
                    break;
                }
            };

            // Flip the flag on the first chunk received from a publisher.
            if !is_running {
                ingest_is_running.store(true, Ordering::SeqCst);
                is_running = true;
            }

            if bytes_len > 0 {
                if let Err(e) = ingest_sender.send((bytes_len, buffer)) {
                    error!(target: Target::file_mail(), channel = id; "Ingest server write error: {e:?}");

                    is_terminated.store(true, Ordering::SeqCst);
                    break;
                }
            } else {
                // Zero-length read: publisher disconnected.
                break;
            }
        }

        drop(ingest_reader);
        ingest_is_running.store(false, Ordering::SeqCst);

        // Reap the ffmpeg process before restarting the loop.
        if let Err(e) = channel_mgr.wait(Ingest) {
            error!(target: Target::file_mail(), channel = id; "{e}")
        }

        if let Err(e) = error_reader_thread.join() {
            error!(target: Target::file_mail(), channel = id; "{e:?}");
        };
    }

    Ok(())
}
|
|
@ -1,50 +0,0 @@
|
|||||||
use std::thread;
|
|
||||||
|
|
||||||
use log::*;
|
|
||||||
|
|
||||||
pub mod folder;
|
|
||||||
pub mod ingest;
|
|
||||||
pub mod playlist;
|
|
||||||
|
|
||||||
pub use folder::watchman;
|
|
||||||
pub use ingest::ingest_server;
|
|
||||||
pub use playlist::CurrentProgram;
|
|
||||||
|
|
||||||
use crate::player::{
|
|
||||||
controller::ChannelManager,
|
|
||||||
utils::{folder::FolderSource, Media},
|
|
||||||
};
|
|
||||||
use crate::utils::{config::ProcessMode::*, logging::Target};
|
|
||||||
|
|
||||||
/// Create a source iterator from playlist, or from folder.
|
|
||||||
pub fn source_generator(manager: ChannelManager) -> Box<dyn Iterator<Item = Media>> {
|
|
||||||
let config = manager.config.lock().unwrap().clone();
|
|
||||||
let id = config.general.channel_id;
|
|
||||||
let is_terminated = manager.is_terminated.clone();
|
|
||||||
let current_list = manager.current_list.clone();
|
|
||||||
|
|
||||||
match config.processing.mode {
|
|
||||||
Folder => {
|
|
||||||
info!(target: Target::file_mail(), channel = id; "Playout in folder mode");
|
|
||||||
debug!(target: Target::file_mail(), channel = id;
|
|
||||||
"Monitor folder: <b><magenta>{:?}</></b>",
|
|
||||||
config.channel.storage
|
|
||||||
);
|
|
||||||
|
|
||||||
let config_clone = config.clone();
|
|
||||||
let folder_source = FolderSource::new(&config, manager);
|
|
||||||
let list_clone = current_list.clone();
|
|
||||||
|
|
||||||
// Spawn a thread to monitor folder for file changes.
|
|
||||||
thread::spawn(move || watchman(config_clone, is_terminated.clone(), list_clone));
|
|
||||||
|
|
||||||
Box::new(folder_source) as Box<dyn Iterator<Item = Media>>
|
|
||||||
}
|
|
||||||
Playlist => {
|
|
||||||
info!(target: Target::file_mail(), channel = id; "Playout in playlist mode");
|
|
||||||
let program = CurrentProgram::new(manager);
|
|
||||||
|
|
||||||
Box::new(program) as Box<dyn Iterator<Item = Media>>
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,319 +0,0 @@
|
|||||||
/*
|
|
||||||
This module write the files compression directly to a hls (m3u8) playlist,
|
|
||||||
without pre- and post-processing.
|
|
||||||
|
|
||||||
Example config:
|
|
||||||
|
|
||||||
out:
|
|
||||||
output_param: >-
|
|
||||||
...
|
|
||||||
|
|
||||||
-flags +cgop
|
|
||||||
-f hls
|
|
||||||
-hls_time 6
|
|
||||||
-hls_list_size 600
|
|
||||||
-hls_flags append_list+delete_segments+omit_endlist+program_date_time
|
|
||||||
-hls_segment_filename /var/www/html/live/stream-%d.ts /var/www/html/live/stream.m3u8
|
|
||||||
|
|
||||||
*/
|
|
||||||
|
|
||||||
use std::{
|
|
||||||
io::{BufRead, BufReader},
|
|
||||||
process::{Command, Stdio},
|
|
||||||
sync::atomic::Ordering,
|
|
||||||
thread::{self, sleep},
|
|
||||||
time::{Duration, SystemTime},
|
|
||||||
};
|
|
||||||
|
|
||||||
use log::*;
|
|
||||||
|
|
||||||
use crate::utils::{logging::log_line, task_runner};
|
|
||||||
use crate::vec_strings;
|
|
||||||
use crate::{
|
|
||||||
player::{
|
|
||||||
controller::{ChannelManager, ProcessUnit::*},
|
|
||||||
input::source_generator,
|
|
||||||
utils::{
|
|
||||||
get_delta, is_free_tcp_port, prepare_output_cmd, sec_to_time, stderr_reader,
|
|
||||||
valid_stream, Media,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
utils::{errors::ProcessError, logging::Target},
|
|
||||||
};
|
|
||||||
|
|
||||||
/// Ingest Server for HLS
///
/// Spawns ffmpeg in listen mode and writes its output straight to the
/// HLS target. When a publisher connects ("Input #0" on stderr) the
/// regular decoder is stopped and live ingest takes over; when it
/// disconnects, the loop respawns the listener. Rapidly failing spawns
/// (more than 10 exits faster than 300 ms) terminate the channel.
fn ingest_to_hls_server(manager: ChannelManager) -> Result<(), ProcessError> {
    let config = manager.config.lock().unwrap();
    let id = config.general.channel_id;
    let playlist_init = manager.list_init.clone();
    let chain = manager.filter_chain.clone();
    let mut error_count = 0;

    let mut server_prefix = vec_strings!["-hide_banner", "-nostats", "-v", "level+info"];
    // assumes ingest.input_cmd is always configured when ingest is enabled — TODO confirm
    let stream_input = config.ingest.input_cmd.clone().unwrap();
    let mut dummy_media = Media::new(0, "Live Stream", false);
    dummy_media.unit = Ingest;

    let is_terminated = manager.is_terminated.clone();
    let ingest_is_running = manager.ingest_is_running.clone();

    if let Some(ingest_input_cmd) = &config.advanced.ingest.input_cmd {
        server_prefix.append(&mut ingest_input_cmd.clone());
    }

    server_prefix.append(&mut stream_input.clone());

    // Optional secondary input with a dummy WebVTT track.
    if config.processing.vtt_enable {
        let vtt_dummy = config
            .channel
            .storage
            .join(config.processing.vtt_dummy.clone().unwrap_or_default());

        if vtt_dummy.is_file() {
            server_prefix.append(&mut vec_strings!["-i", vtt_dummy.to_string_lossy()]);
        }
    }

    let mut is_running;

    // Refuse to start when the listen port is already taken.
    if let Some(url) = stream_input.iter().find(|s| s.contains("://")) {
        if !is_free_tcp_port(id, url) {
            manager.channel.lock().unwrap().active = false;
            manager.stop_all();
        } else {
            info!(target: Target::file_mail(), channel = id; "Start ingest server, listening on: <b><magenta>{url}</></b>");
        }
    };

    // Release the config lock before entering the restart loop, which
    // re-acquires it on every iteration.
    drop(config);

    loop {
        let config = manager.config.lock().unwrap().clone();
        dummy_media.add_filter(&config, &chain);
        let server_cmd = prepare_output_cmd(&config, server_prefix.clone(), &dummy_media.filter);
        // Used below to detect too-fast failure loops.
        let timer = SystemTime::now();

        debug!(target: Target::file_mail(), channel = id;
            "Server CMD: <bright-blue>\"ffmpeg {}\"</>",
            server_cmd.join(" ")
        );

        let proc_ctl = manager.clone();
        let mut server_proc = match Command::new("ffmpeg")
            .args(server_cmd.clone())
            .stderr(Stdio::piped())
            .spawn()
        {
            Err(e) => {
                error!(target: Target::file_mail(), channel = id; "couldn't spawn ingest server: {e}");
                panic!("couldn't spawn ingest server: {e}");
            }
            Ok(proc) => proc,
        };

        let server_err = BufReader::new(server_proc.stderr.take().unwrap());
        *manager.ingest.lock().unwrap() = Some(server_proc);
        is_running = false;

        for line in server_err.lines() {
            let line = line?;

            // Reject RTMP publishers whose stream is not valid.
            if line.contains("rtmp") && line.contains("Unexpected stream") && !valid_stream(&line) {
                warn!(target: Target::file_mail(), channel = id; "Unexpected ingest stream: {line}");

                if let Err(e) = proc_ctl.stop(Ingest) {
                    error!(target: Target::file_mail(), channel = id; "{e}");
                };
            }

            // First sign of an accepted publisher: switch to live ingest.
            if !is_running && line.contains("Input #0") {
                ingest_is_running.store(true, Ordering::SeqCst);
                playlist_init.store(true, Ordering::SeqCst);
                is_running = true;

                info!(target: Target::file_mail(), channel = id; "Switch from {} to live ingest", config.processing.mode);

                if let Err(e) = manager.stop(Decoder) {
                    error!(target: Target::file_mail(), channel = id; "{e}");
                }
            }

            // Log at ingest level while live, otherwise at ffmpeg level.
            if ingest_is_running.load(Ordering::SeqCst) {
                log_line(&line, &config.logging.ingest_level);
            } else {
                log_line(&line, &config.logging.ffmpeg_level);
            }
        }

        if ingest_is_running.load(Ordering::SeqCst) {
            info!(target: Target::file_mail(), channel = id; "Switch from live ingest to {}", config.processing.mode);
        }

        ingest_is_running.store(false, Ordering::SeqCst);

        if let Err(e) = manager.wait(Ingest) {
            error!(target: Target::file_mail(), channel = id; "{e}")
        }

        if is_terminated.load(Ordering::SeqCst) {
            break;
        }

        // Fast consecutive exits indicate a fatal misconfiguration.
        if let Ok(elapsed) = timer.elapsed() {
            if elapsed.as_millis() < 300 {
                error_count += 1;

                if error_count > 10 {
                    error!(target: Target::file_mail(), channel = id; "Reach fatal error count in ingest, terminate channel!");
                    manager.channel.lock().unwrap().active = false;
                    manager.stop_all();
                    break;
                }
            } else {
                error_count = 0;
            }
        }
    }

    Ok(())
}
|
|
||||||
|
|
||||||
/// HLS Writer
///
/// Write with single ffmpeg instance directly to a HLS playlist.
///
/// Iterates the media source (playlist or folder), spawning one ffmpeg
/// process per clip. When ingest is enabled, a live ingest server runs
/// on its own thread and playout pauses while it is active. Rapidly
/// failing spawns (more than 10 exits faster than 300 ms) terminate the
/// channel.
pub fn write_hls(manager: ChannelManager) -> Result<(), ProcessError> {
    let config = manager.config.lock()?.clone();
    let id = config.general.channel_id;
    let current_media = manager.current_media.clone();
    let is_terminated = manager.is_terminated.clone();

    let ff_log_format = format!("level+{}", config.logging.ffmpeg_level.to_lowercase());

    let channel_mgr_2 = manager.clone();
    let ingest_is_running = manager.ingest_is_running.clone();

    let get_source = source_generator(manager.clone());

    // spawn a thread for ffmpeg ingest server and create a channel for package sending
    if config.ingest.enable {
        thread::spawn(move || ingest_to_hls_server(channel_mgr_2));
    }

    let mut error_count = 0;

    for node in get_source {
        // Publish the clip currently being played.
        *current_media.lock().unwrap() = Some(node.clone());
        let ignore = config.logging.ignore_lines.clone();
        // Used below to detect too-fast failure loops.
        let timer = SystemTime::now();

        if is_terminated.load(Ordering::SeqCst) {
            break;
        }

        let mut cmd = match &node.cmd {
            Some(cmd) => cmd.clone(),
            None => break,
        };

        // NOTE(review): assumes node.process is always Some here — TODO confirm
        if !node.process.unwrap() {
            continue;
        }

        info!(target: Target::file_mail(), channel = id;
            "Play for <yellow>{}</>: <b><magenta>{}</></b>",
            sec_to_time(node.out - node.seek),
            node.source
        );

        // Fire the optional per-clip task runner.
        if config.task.enable {
            if config.task.path.is_file() {
                let channel_mgr_3 = manager.clone();

                thread::spawn(move || task_runner::run(channel_mgr_3));
            } else {
                error!(target: Target::file_mail(), channel = id;
                    "<bright-blue>{:?}</> executable not exists!",
                    config.task.path
                );
            }
        }

        let mut dec_prefix = vec_strings!["-hide_banner", "-nostats", "-v", &ff_log_format];

        if let Some(decoder_input_cmd) = &config.advanced.decoder.input_cmd {
            dec_prefix.append(&mut decoder_input_cmd.clone());
        }

        let mut read_rate = 1.0;

        // Slow the read rate slightly when playout is ahead of schedule,
        // so the playlist drifts back toward real time.
        if let Some(begin) = &node.begin {
            let (delta, _) = get_delta(&config, begin);
            let duration = node.out - node.seek;
            let speed = duration / (duration + delta);

            if node.seek == 0.0
                && speed > 0.0
                && speed < 1.3
                && delta < config.general.stop_threshold
            {
                read_rate = speed;
            }
        }

        dec_prefix.append(&mut vec_strings!["-readrate", read_rate]);

        dec_prefix.append(&mut cmd);
        let dec_cmd = prepare_output_cmd(&config, dec_prefix, &node.filter);

        debug!(target: Target::file_mail(), channel = id;
            "HLS writer CMD: <bright-blue>\"ffmpeg {}\"</>",
            dec_cmd.join(" ")
        );

        let mut dec_proc = match Command::new("ffmpeg")
            .args(dec_cmd)
            .stderr(Stdio::piped())
            .spawn()
        {
            Ok(proc) => proc,
            Err(e) => {
                error!(target: Target::file_mail(), channel = id; "couldn't spawn ffmpeg process: {e}");
                panic!("couldn't spawn ffmpeg process: {e}")
            }
        };

        let dec_err = BufReader::new(dec_proc.stderr.take().unwrap());
        *manager.decoder.lock().unwrap() = Some(dec_proc);

        // Blocks until the clip's ffmpeg process closes its stderr.
        if let Err(e) = stderr_reader(dec_err, ignore, Decoder, manager.clone()) {
            error!(target: Target::file_mail(), channel = id; "{e:?}")
        };

        if let Err(e) = manager.wait(Decoder) {
            error!(target: Target::file_mail(), channel = id; "{e}");
        }

        // Pause playout while a live ingest stream is active.
        while ingest_is_running.load(Ordering::SeqCst) {
            sleep(Duration::from_secs(1));
        }

        // Fast consecutive exits indicate a fatal misconfiguration.
        if let Ok(elapsed) = timer.elapsed() {
            if elapsed.as_millis() < 300 {
                error_count += 1;

                if error_count > 10 {
                    error!(target: Target::file_mail(), channel = id; "Reach fatal error count, terminate channel!");
                    break;
                }
            } else {
                error_count = 0;
            }
        }
    }

    sleep(Duration::from_secs(1));

    manager.stop_all();

    Ok(())
}
|
|
@ -1,306 +0,0 @@
|
|||||||
use std::path::Path;
|
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use serde_with::{serde_as, NoneAsEmptyString};
|
|
||||||
use shlex::split;
|
|
||||||
use sqlx::{Pool, Sqlite};
|
|
||||||
use tokio::io::AsyncReadExt;
|
|
||||||
use ts_rs::TS;
|
|
||||||
|
|
||||||
use crate::db::{handles, models::AdvancedConfiguration};
|
|
||||||
use crate::utils::ServiceError;
|
|
||||||
|
|
||||||
/// Optional advanced overrides for the ffmpeg pipeline, grouped by
/// processing stage. Exported to TypeScript for the frontend.
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct AdvancedConfig {
    pub decoder: DecoderConfig,
    pub encoder: EncoderConfig,
    pub filter: FilterConfig,
    pub ingest: IngestConfig,
}
|
|
||||||
|
|
||||||
/// Custom ffmpeg decoder arguments.
///
/// The `*_param` fields hold the raw option strings; the matching
/// `*_cmd` fields carry the same values pre-split into argv tokens and
/// are excluded from (de)serialization and the TS export.
#[serde_as]
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct DecoderConfig {
    // Raw input-side option string; empty string maps to None.
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub input_param: Option<String>,
    // Raw output-side option string; empty string maps to None.
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub output_param: Option<String>,
    // `input_param` split into argv tokens (runtime only).
    #[ts(skip)]
    #[serde(skip_serializing, skip_deserializing)]
    pub input_cmd: Option<Vec<String>>,
    // `output_param` split into argv tokens (runtime only).
    #[ts(skip)]
    #[serde(skip_serializing, skip_deserializing)]
    pub output_cmd: Option<Vec<String>>,
}
|
|
||||||
|
|
||||||
/// Custom ffmpeg encoder input arguments; `input_cmd` is the pre-split
/// runtime form of `input_param` and is not serialized.
#[serde_as]
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct EncoderConfig {
    // Raw option string; empty string maps to None.
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub input_param: Option<String>,
    // `input_param` split into argv tokens (runtime only).
    #[ts(skip)]
    #[serde(skip_serializing, skip_deserializing)]
    pub input_cmd: Option<Vec<String>>,
}
|
|
||||||
|
|
||||||
/// Custom ffmpeg ingest-server input arguments; `input_cmd` is the
/// pre-split runtime form of `input_param` and is not serialized.
#[serde_as]
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct IngestConfig {
    // Raw option string; empty string maps to None.
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub input_param: Option<String>,
    // `input_param` split into argv tokens (runtime only).
    #[ts(skip)]
    #[serde(skip_serializing, skip_deserializing)]
    pub input_cmd: Option<Vec<String>>,
}
|
|
||||||
|
|
||||||
/// Per-filter override strings for the generated ffmpeg filter chain.
///
/// Each field replaces the default definition of the filter it is
/// named after; `None` (stored as an empty string) keeps the built-in
/// default.
#[serde_as]
#[derive(Debug, Default, Serialize, Deserialize, Clone, TS)]
#[ts(export, export_to = "advanced_config.d.ts")]
pub struct FilterConfig {
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub deinterlace: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub pad_scale_w: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub pad_scale_h: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub pad_video: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub fps: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub scale: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub set_dar: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub fade_in: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub fade_out: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub overlay_logo_scale: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub overlay_logo_fade_in: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub overlay_logo_fade_out: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub overlay_logo: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub tpad: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub drawtext_from_file: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub drawtext_from_zmq: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub aevalsrc: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub afade_in: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub afade_out: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub apad: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub volume: Option<String>,
    #[ts(type = "string")]
    #[serde_as(as = "NoneAsEmptyString")]
    pub split: Option<String>,
}
|
|
||||||
|
|
||||||
impl AdvancedConfig {
|
|
||||||
pub fn new(config: AdvancedConfiguration) -> Self {
|
|
||||||
Self {
|
|
||||||
decoder: DecoderConfig {
|
|
||||||
input_param: config.decoder_input_param.clone(),
|
|
||||||
output_param: config.decoder_output_param.clone(),
|
|
||||||
input_cmd: match config.decoder_input_param {
|
|
||||||
Some(input_param) => split(&input_param),
|
|
||||||
None => None,
|
|
||||||
},
|
|
||||||
output_cmd: match config.decoder_output_param {
|
|
||||||
Some(output_param) => split(&output_param),
|
|
||||||
None => None,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
encoder: EncoderConfig {
|
|
||||||
input_param: config.encoder_input_param.clone(),
|
|
||||||
input_cmd: match config.encoder_input_param {
|
|
||||||
Some(input_param) => split(&input_param),
|
|
||||||
None => None,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
filter: FilterConfig {
|
|
||||||
deinterlace: config.filter_deinterlace,
|
|
||||||
pad_scale_w: config.filter_pad_scale_w,
|
|
||||||
pad_scale_h: config.filter_pad_scale_h,
|
|
||||||
pad_video: config.filter_pad_video,
|
|
||||||
fps: config.filter_fps,
|
|
||||||
scale: config.filter_scale,
|
|
||||||
set_dar: config.filter_set_dar,
|
|
||||||
fade_in: config.filter_fade_in,
|
|
||||||
fade_out: config.filter_fade_out,
|
|
||||||
overlay_logo_scale: config.filter_overlay_logo_scale,
|
|
||||||
overlay_logo_fade_in: config.filter_overlay_logo_fade_in,
|
|
||||||
overlay_logo_fade_out: config.filter_overlay_logo_fade_out,
|
|
||||||
overlay_logo: config.filter_overlay_logo,
|
|
||||||
tpad: config.filter_tpad,
|
|
||||||
drawtext_from_file: config.filter_drawtext_from_file,
|
|
||||||
drawtext_from_zmq: config.filter_drawtext_from_zmq,
|
|
||||||
aevalsrc: config.filter_aevalsrc,
|
|
||||||
afade_in: config.filter_afade_in,
|
|
||||||
afade_out: config.filter_afade_out,
|
|
||||||
apad: config.filter_apad,
|
|
||||||
volume: config.filter_volume,
|
|
||||||
split: config.filter_split,
|
|
||||||
},
|
|
||||||
ingest: IngestConfig {
|
|
||||||
input_param: config.ingest_input_param.clone(),
|
|
||||||
input_cmd: match config.ingest_input_param {
|
|
||||||
Some(input_param) => split(&input_param),
|
|
||||||
None => None,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn dump(pool: &Pool<Sqlite>, id: i32) -> Result<(), ServiceError> {
|
|
||||||
let config = Self::new(handles::select_advanced_configuration(pool, id).await?);
|
|
||||||
let f_keys = [
|
|
||||||
"deinterlace",
|
|
||||||
"pad_scale_w",
|
|
||||||
"pad_scale_h",
|
|
||||||
"pad_video",
|
|
||||||
"fps",
|
|
||||||
"scale",
|
|
||||||
"set_dar",
|
|
||||||
"fade_in",
|
|
||||||
"fade_out",
|
|
||||||
"overlay_logo_scale",
|
|
||||||
"overlay_logo_fade_in",
|
|
||||||
"overlay_logo_fade_out",
|
|
||||||
"overlay_logo",
|
|
||||||
"tpad",
|
|
||||||
"drawtext_from_file",
|
|
||||||
"drawtext_from_zmq",
|
|
||||||
"aevalsrc",
|
|
||||||
"afade_in",
|
|
||||||
"afade_out",
|
|
||||||
"apad",
|
|
||||||
"volume",
|
|
||||||
"split",
|
|
||||||
];
|
|
||||||
|
|
||||||
let toml_string = toml_edit::ser::to_string_pretty(&config)?;
|
|
||||||
let mut doc = toml_string.parse::<toml_edit::DocumentMut>()?;
|
|
||||||
|
|
||||||
if let Some(decoder) = doc.get_mut("decoder").and_then(|o| o.as_table_mut()) {
|
|
||||||
decoder
|
|
||||||
.decor_mut()
|
|
||||||
.set_prefix("# Changing these settings is for advanced users only!\n# There will be no support or guarantee that it will be stable after changing them.\n\n");
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(output_param) = doc
|
|
||||||
.get_mut("decoder")
|
|
||||||
.and_then(|d| d.get_mut("output_param"))
|
|
||||||
.and_then(|o| o.as_value_mut())
|
|
||||||
{
|
|
||||||
output_param
|
|
||||||
.decor_mut()
|
|
||||||
.set_suffix(" # get also applied to ingest instance.");
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(filter) = doc.get_mut("filter") {
|
|
||||||
for key in &f_keys {
|
|
||||||
if let Some(item) = filter.get_mut(*key).and_then(|o| o.as_value_mut()) {
|
|
||||||
match *key {
|
|
||||||
"deinterlace" => item.decor_mut().set_suffix(" # yadif=0:-1:0"),
|
|
||||||
"pad_scale_w" => item.decor_mut().set_suffix(" # scale={}:-1"),
|
|
||||||
"pad_scale_h" => item.decor_mut().set_suffix(" # scale=-1:{}"),
|
|
||||||
"pad_video" => item.decor_mut().set_suffix(
|
|
||||||
" # pad=max(iw\\,ih*({0}/{1})):ow/({0}/{1}):(ow-iw)/2:(oh-ih)/2",
|
|
||||||
),
|
|
||||||
"fps" => item.decor_mut().set_suffix(" # fps={}"),
|
|
||||||
"scale" => item.decor_mut().set_suffix(" # scale={}:{}"),
|
|
||||||
"set_dar" => item.decor_mut().set_suffix(" # setdar=dar={}"),
|
|
||||||
"fade_in" => item.decor_mut().set_suffix(" # fade=in:st=0:d=0.5"),
|
|
||||||
"fade_out" => item.decor_mut().set_suffix(" # fade=out:st={}:d=1.0"),
|
|
||||||
"overlay_logo_scale" => item.decor_mut().set_suffix(" # scale={}"),
|
|
||||||
"overlay_logo_fade_in" => {
|
|
||||||
item.decor_mut().set_suffix(" # fade=in:st=0:d=1.0:alpha=1")
|
|
||||||
}
|
|
||||||
"overlay_logo_fade_out" => item
|
|
||||||
.decor_mut()
|
|
||||||
.set_suffix(" # fade=out:st={}:d=1.0:alpha=1"),
|
|
||||||
"overlay_logo" => item
|
|
||||||
.decor_mut()
|
|
||||||
.set_suffix(" # null[l];[v][l]overlay={}:shortest=1"),
|
|
||||||
"tpad" => item
|
|
||||||
.decor_mut()
|
|
||||||
.set_suffix(" # tpad=stop_mode=add:stop_duration={}"),
|
|
||||||
"drawtext_from_file" => {
|
|
||||||
item.decor_mut().set_suffix(" # drawtext=text='{}':{}{}")
|
|
||||||
}
|
|
||||||
"drawtext_from_zmq" => item
|
|
||||||
.decor_mut()
|
|
||||||
.set_suffix(" # zmq=b=tcp\\\\://'{}',drawtext@dyntext={}"),
|
|
||||||
"aevalsrc" => item.decor_mut().set_suffix(
|
|
||||||
" # aevalsrc=0:channel_layout=stereo:duration={}:sample_rate=48000",
|
|
||||||
),
|
|
||||||
"afade_in" => item.decor_mut().set_suffix(" # afade=in:st=0:d=0.5"),
|
|
||||||
"afade_out" => item.decor_mut().set_suffix(" # afade=out:st={}:d=1.0"),
|
|
||||||
"apad" => item.decor_mut().set_suffix(" # apad=whole_dur={}"),
|
|
||||||
"volume" => item.decor_mut().set_suffix(" # volume={}"),
|
|
||||||
"split" => item.decor_mut().set_suffix(" # split={}{}"),
|
|
||||||
_ => (),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
tokio::fs::write(&format!("advanced_{id}.toml"), doc.to_string()).await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn import(pool: &Pool<Sqlite>, id: i32, path: &Path) -> Result<(), ServiceError> {
|
|
||||||
if path.is_file() {
|
|
||||||
let mut file = tokio::fs::File::open(path).await?;
|
|
||||||
let mut contents = String::new();
|
|
||||||
file.read_to_string(&mut contents).await?;
|
|
||||||
|
|
||||||
let config: Self = toml_edit::de::from_str(&contents).unwrap();
|
|
||||||
|
|
||||||
handles::update_advanced_configuration(pool, id, config).await?;
|
|
||||||
} else {
|
|
||||||
return Err(ServiceError::BadRequest("Path not exists!".to_string()));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,595 +0,0 @@
|
|||||||
use std::{
|
|
||||||
io::{stdin, stdout, Write},
|
|
||||||
path::{Path, PathBuf},
|
|
||||||
};
|
|
||||||
|
|
||||||
#[cfg(target_family = "unix")]
|
|
||||||
use std::os::unix::fs::MetadataExt;
|
|
||||||
|
|
||||||
use clap::Parser;
|
|
||||||
use rpassword::read_password;
|
|
||||||
use sqlx::{Pool, Sqlite};
|
|
||||||
|
|
||||||
#[cfg(target_family = "unix")]
|
|
||||||
use tokio::fs;
|
|
||||||
|
|
||||||
use crate::db::{
|
|
||||||
handles,
|
|
||||||
models::{Channel, User},
|
|
||||||
};
|
|
||||||
use crate::utils::{
|
|
||||||
advanced_config::AdvancedConfig,
|
|
||||||
config::{OutputMode, PlayoutConfig},
|
|
||||||
copy_assets,
|
|
||||||
};
|
|
||||||
use crate::ARGS;
|
|
||||||
|
|
||||||
#[cfg(target_family = "unix")]
|
|
||||||
use crate::utils::db_path;
|
|
||||||
|
|
||||||
#[derive(Parser, Debug, Clone)]
|
|
||||||
#[clap(version,
|
|
||||||
about = "ffplayout - 24/7 broadcasting solution",
|
|
||||||
long_about = Some("ffplayout - 24/7 broadcasting solution\n
|
|
||||||
Stream dynamic playlists or folder contents with the power of ffmpeg.
|
|
||||||
The target can be an HLS playlist, rtmp/srt/udp server, desktop player
|
|
||||||
or any other output supported by ffmpeg.\n
|
|
||||||
ffplayout also provides a web frontend and API to control streaming,
|
|
||||||
manage config, files, text overlay, etc."),
|
|
||||||
next_line_help = false,
|
|
||||||
)]
|
|
||||||
pub struct Args {
|
|
||||||
#[clap(
|
|
||||||
short,
|
|
||||||
long,
|
|
||||||
help_heading = Some("Initial Setup"),
|
|
||||||
help = "Initialize defaults: global admin, paths, settings, etc."
|
|
||||||
)]
|
|
||||||
pub init: bool,
|
|
||||||
|
|
||||||
#[clap(short, long, help_heading = Some("Initial Setup"), help = "Create admin user")]
|
|
||||||
pub username: Option<String>,
|
|
||||||
|
|
||||||
#[clap(short, long, help_heading = Some("Initial Setup"), help = "Admin mail address")]
|
|
||||||
pub mail: Option<String>,
|
|
||||||
|
|
||||||
#[clap(short, long, help_heading = Some("Initial Setup"), help = "Admin password")]
|
|
||||||
pub password: Option<String>,
|
|
||||||
|
|
||||||
#[clap(long, env, help_heading = Some("Initial Setup"), help = "Storage root path")]
|
|
||||||
pub storage: Option<String>,
|
|
||||||
|
|
||||||
#[clap(long, env, help_heading = Some("Initial Setup"), help = "SMTP server for system mails")]
|
|
||||||
pub mail_smtp: Option<String>,
|
|
||||||
|
|
||||||
#[clap(long, env, help_heading = Some("Initial Setup"), help = "Mail user for system mails")]
|
|
||||||
pub mail_user: Option<String>,
|
|
||||||
|
|
||||||
#[clap(long, env, help_heading = Some("Initial Setup"), help = "Mail password for system mails")]
|
|
||||||
pub mail_password: Option<String>,
|
|
||||||
|
|
||||||
#[clap(long, env, help_heading = Some("Initial Setup"), help = "Use TLS for system mails")]
|
|
||||||
pub mail_starttls: bool,
|
|
||||||
|
|
||||||
#[clap(long, env, help_heading = Some("Initial Setup / General"), help = "Logging path")]
|
|
||||||
pub logs: Option<String>,
|
|
||||||
|
|
||||||
#[clap(long, env, help_heading = Some("Initial Setup / General"), help = "Path to public files, also HLS playlists")]
|
|
||||||
pub public: Option<String>,
|
|
||||||
|
|
||||||
#[clap(long, help_heading = Some("Initial Setup / Playlist"), help = "Path to playlist, or playlist root folder.")]
|
|
||||||
pub playlists: Option<String>,
|
|
||||||
|
|
||||||
#[clap(long, help_heading = Some("General"), help = "Add or update a global admin use")]
|
|
||||||
pub user_set: bool,
|
|
||||||
|
|
||||||
#[clap(long, env, help_heading = Some("General"), help = "Path to database file")]
|
|
||||||
pub db: Option<PathBuf>,
|
|
||||||
|
|
||||||
#[clap(
|
|
||||||
long,
|
|
||||||
help_heading = Some("General"),
|
|
||||||
help = "Drop database. WARNING: this will delete all configurations!"
|
|
||||||
)]
|
|
||||||
pub drop_db: bool,
|
|
||||||
|
|
||||||
#[clap(
|
|
||||||
long,
|
|
||||||
help_heading = Some("General"),
|
|
||||||
help = "Dump advanced channel configuration to advanced_{channel}.toml"
|
|
||||||
)]
|
|
||||||
pub dump_advanced: bool,
|
|
||||||
|
|
||||||
#[clap(long, help_heading = Some("General"), help = "Dump channel configuration to ffplayout_{channel}.toml")]
|
|
||||||
pub dump_config: bool,
|
|
||||||
|
|
||||||
#[clap(
|
|
||||||
long,
|
|
||||||
help_heading = Some("General"),
|
|
||||||
help = "import advanced channel configuration from file."
|
|
||||||
)]
|
|
||||||
pub import_advanced: Option<PathBuf>,
|
|
||||||
|
|
||||||
#[clap(long, help_heading = Some("General"), help = "import channel configuration from file.")]
|
|
||||||
pub import_config: Option<PathBuf>,
|
|
||||||
|
|
||||||
#[clap(long, help_heading = Some("General"), help = "List available channel ids")]
|
|
||||||
pub list_channels: bool,
|
|
||||||
|
|
||||||
#[clap(short, env, long, help_heading = Some("General"), help = "Listen on IP:PORT, like: 127.0.0.1:8787")]
|
|
||||||
pub listen: Option<String>,
|
|
||||||
|
|
||||||
#[clap(
|
|
||||||
long,
|
|
||||||
env,
|
|
||||||
help_heading = Some("General"),
|
|
||||||
help = "Override logging level: trace, debug, println, warn, eprintln"
|
|
||||||
)]
|
|
||||||
pub log_level: Option<String>,
|
|
||||||
|
|
||||||
#[clap(long, env, help_heading = Some("General"), help = "Log to console")]
|
|
||||||
pub log_to_console: bool,
|
|
||||||
|
|
||||||
#[clap(
|
|
||||||
short,
|
|
||||||
long,
|
|
||||||
env,
|
|
||||||
help_heading = Some("General / Playout"),
|
|
||||||
help = "Channels by ids to process (for export config, generate playlist, foreground running, etc.)",
|
|
||||||
num_args = 1..,
|
|
||||||
)]
|
|
||||||
pub channels: Option<Vec<i32>>,
|
|
||||||
|
|
||||||
#[clap(
|
|
||||||
short,
|
|
||||||
long,
|
|
||||||
help_heading = Some("Playlist"),
|
|
||||||
help = "Generate playlist for dates, like: 2022-01-01 - 2022-01-10",
|
|
||||||
name = "YYYY-MM-DD",
|
|
||||||
num_args = 1..,
|
|
||||||
)]
|
|
||||||
pub generate: Option<Vec<String>>,
|
|
||||||
|
|
||||||
#[clap(long, help_heading = Some("Playlist"), help = "Optional path list for playlist generations", num_args = 1..)]
|
|
||||||
pub paths: Option<Vec<PathBuf>>,
|
|
||||||
|
|
||||||
#[clap(
|
|
||||||
short,
|
|
||||||
long,
|
|
||||||
help_heading = Some("Playlist"),
|
|
||||||
help = "Start time in 'hh:mm:ss', 'now' for start with first"
|
|
||||||
)]
|
|
||||||
pub start: Option<String>,
|
|
||||||
|
|
||||||
#[clap(short = 'T', long, help_heading = Some("Playlist"), help = "JSON template file for generating playlist")]
|
|
||||||
pub template: Option<PathBuf>,
|
|
||||||
|
|
||||||
#[clap(long, help_heading = Some("Playlist"), help = "Only validate given playlist")]
|
|
||||||
pub validate: bool,
|
|
||||||
|
|
||||||
#[clap(long, env, help_heading = Some("Playout"), help = "Run playout without webserver and frontend")]
|
|
||||||
pub foreground: bool,
|
|
||||||
|
|
||||||
#[clap(short, long, help_heading = Some("Playout"), help = "Play folder content")]
|
|
||||||
pub folder: Option<PathBuf>,
|
|
||||||
|
|
||||||
#[clap(long, env, help_heading = Some("Playout"), help = "Keep log file for given days")]
|
|
||||||
pub log_backup_count: Option<usize>,
|
|
||||||
|
|
||||||
#[clap(long, env, help_heading = Some("Playout"), help = "Add timestamp to log line")]
|
|
||||||
pub log_timestamp: bool,
|
|
||||||
|
|
||||||
#[clap(short, long, help_heading = Some("Playout"), help = "Set output mode: desktop, hls, null, stream")]
|
|
||||||
pub output: Option<OutputMode>,
|
|
||||||
|
|
||||||
#[clap(short, long, help_heading = Some("Playout"), help = "Set audio volume")]
|
|
||||||
pub volume: Option<f64>,
|
|
||||||
|
|
||||||
#[clap(long, help_heading = Some("Playout"), help = "Skip validation process")]
|
|
||||||
pub skip_validation: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
fn global_user(args: &mut Args) {
|
|
||||||
let mut user = String::new();
|
|
||||||
let mut mail = String::new();
|
|
||||||
|
|
||||||
if args.username.is_none() {
|
|
||||||
print!("Global admin: ");
|
|
||||||
stdout().flush().unwrap();
|
|
||||||
|
|
||||||
stdin()
|
|
||||||
.read_line(&mut user)
|
|
||||||
.expect("Did not enter a correct name?");
|
|
||||||
|
|
||||||
args.username = Some(user.trim().to_string());
|
|
||||||
}
|
|
||||||
|
|
||||||
if args.password.is_none() {
|
|
||||||
print!("Password: ");
|
|
||||||
stdout().flush().unwrap();
|
|
||||||
let password = read_password();
|
|
||||||
|
|
||||||
args.password = password.ok();
|
|
||||||
}
|
|
||||||
|
|
||||||
if args.mail.is_none() {
|
|
||||||
print!("Mail: ");
|
|
||||||
stdout().flush().unwrap();
|
|
||||||
|
|
||||||
stdin()
|
|
||||||
.read_line(&mut mail)
|
|
||||||
.expect("Did not enter a correct name?");
|
|
||||||
|
|
||||||
args.mail = Some(mail.trim().to_string());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn run_args(pool: &Pool<Sqlite>) -> Result<(), i32> {
|
|
||||||
let mut args = ARGS.clone();
|
|
||||||
|
|
||||||
if !args.dump_advanced && !args.dump_config && !args.drop_db {
|
|
||||||
if let Err(e) = handles::db_migrate(pool).await {
|
|
||||||
panic!("{e}");
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
let channels = handles::select_related_channels(pool, None)
|
|
||||||
.await
|
|
||||||
.unwrap_or(vec![Channel::default()]);
|
|
||||||
|
|
||||||
let mut error_code = -1;
|
|
||||||
|
|
||||||
if args.init {
|
|
||||||
let check_user = handles::select_users(pool).await;
|
|
||||||
|
|
||||||
let mut storage = String::new();
|
|
||||||
let mut playlist = String::new();
|
|
||||||
let mut logging = String::new();
|
|
||||||
let mut public = String::new();
|
|
||||||
let mut mail_smtp = String::new();
|
|
||||||
let mut mail_user = String::new();
|
|
||||||
let mut mail_starttls = String::new();
|
|
||||||
let mut global = handles::select_global(pool).await.map_err(|_| 1)?;
|
|
||||||
|
|
||||||
if check_user.unwrap_or_default().is_empty() {
|
|
||||||
global_user(&mut args);
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(st) = args.storage {
|
|
||||||
global.storage = st;
|
|
||||||
} else {
|
|
||||||
print!("Storage path [{}]: ", global.storage);
|
|
||||||
stdout().flush().unwrap();
|
|
||||||
stdin()
|
|
||||||
.read_line(&mut storage)
|
|
||||||
.expect("Did not enter a correct path?");
|
|
||||||
|
|
||||||
if !storage.trim().is_empty() {
|
|
||||||
global.storage = storage
|
|
||||||
.trim()
|
|
||||||
.trim_matches(|c| c == '"' || c == '\'')
|
|
||||||
.to_string();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(pl) = args.playlists {
|
|
||||||
global.playlists = pl
|
|
||||||
} else {
|
|
||||||
print!("Playlist path [{}]: ", global.playlists);
|
|
||||||
stdout().flush().unwrap();
|
|
||||||
stdin()
|
|
||||||
.read_line(&mut playlist)
|
|
||||||
.expect("Did not enter a correct path?");
|
|
||||||
|
|
||||||
if !playlist.trim().is_empty() {
|
|
||||||
global.playlists = playlist
|
|
||||||
.trim()
|
|
||||||
.trim_matches(|c| c == '"' || c == '\'')
|
|
||||||
.to_string();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(lp) = args.logs {
|
|
||||||
global.logs = lp;
|
|
||||||
} else {
|
|
||||||
print!("Logging path [{}]: ", global.logs);
|
|
||||||
stdout().flush().unwrap();
|
|
||||||
stdin()
|
|
||||||
.read_line(&mut logging)
|
|
||||||
.expect("Did not enter a correct path?");
|
|
||||||
|
|
||||||
if !logging.trim().is_empty() {
|
|
||||||
global.logs = logging
|
|
||||||
.trim()
|
|
||||||
.trim_matches(|c| c == '"' || c == '\'')
|
|
||||||
.to_string();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(p) = args.public {
|
|
||||||
global.public = p;
|
|
||||||
} else {
|
|
||||||
print!("Public (HLS) path [{}]: ", global.public);
|
|
||||||
stdout().flush().unwrap();
|
|
||||||
stdin()
|
|
||||||
.read_line(&mut public)
|
|
||||||
.expect("Did not enter a correct path?");
|
|
||||||
|
|
||||||
if !public.trim().is_empty() {
|
|
||||||
global.public = public
|
|
||||||
.trim()
|
|
||||||
.trim_matches(|c| c == '"' || c == '\'')
|
|
||||||
.to_string();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(smtp) = args.mail_smtp {
|
|
||||||
global.mail_smtp = smtp;
|
|
||||||
} else {
|
|
||||||
print!("SMTP server [{}]: ", global.mail_smtp);
|
|
||||||
stdout().flush().unwrap();
|
|
||||||
stdin()
|
|
||||||
.read_line(&mut mail_smtp)
|
|
||||||
.expect("Did not enter a correct SMTP server?");
|
|
||||||
|
|
||||||
if !mail_smtp.trim().is_empty() {
|
|
||||||
global.mail_smtp = mail_smtp
|
|
||||||
.trim()
|
|
||||||
.trim_matches(|c| c == '"' || c == '\'')
|
|
||||||
.to_string();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(user) = args.mail_user {
|
|
||||||
global.mail_user = user;
|
|
||||||
} else {
|
|
||||||
print!("SMTP user [{}]: ", global.mail_user);
|
|
||||||
stdout().flush().unwrap();
|
|
||||||
stdin()
|
|
||||||
.read_line(&mut mail_user)
|
|
||||||
.expect("Did not enter a correct SMTP user?");
|
|
||||||
|
|
||||||
if !mail_user.trim().is_empty() {
|
|
||||||
global.mail_user = mail_user
|
|
||||||
.trim()
|
|
||||||
.trim_matches(|c| c == '"' || c == '\'')
|
|
||||||
.to_string();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(pass) = args.mail_password {
|
|
||||||
global.mail_password = pass;
|
|
||||||
} else {
|
|
||||||
print!(
|
|
||||||
"SMTP password [{}]: ",
|
|
||||||
if global.mail_password.is_empty() {
|
|
||||||
""
|
|
||||||
} else {
|
|
||||||
"********"
|
|
||||||
}
|
|
||||||
);
|
|
||||||
stdout().flush().unwrap();
|
|
||||||
let password = read_password().unwrap_or_default();
|
|
||||||
|
|
||||||
if !password.trim().is_empty() {
|
|
||||||
global.mail_password = password.trim().to_string();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if args.mail_starttls {
|
|
||||||
global.mail_starttls = true;
|
|
||||||
} else {
|
|
||||||
print!(
|
|
||||||
"SMTP use TLS [{}]: ",
|
|
||||||
if global.mail_starttls { "yes" } else { "no" }
|
|
||||||
);
|
|
||||||
stdout().flush().unwrap();
|
|
||||||
stdin()
|
|
||||||
.read_line(&mut mail_starttls)
|
|
||||||
.expect("Did not enter a yes or no?");
|
|
||||||
|
|
||||||
if !mail_starttls.trim().is_empty() {
|
|
||||||
global.mail_starttls = mail_starttls.trim().to_lowercase().starts_with('y');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Err(e) = handles::update_global(pool, global.clone()).await {
|
|
||||||
eprintln!("{e}");
|
|
||||||
error_code = 1;
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut channel = handles::select_channel(pool, &1).await.unwrap();
|
|
||||||
channel.public = global.public;
|
|
||||||
channel.playlists = global.playlists;
|
|
||||||
channel.storage = global.storage;
|
|
||||||
|
|
||||||
let mut storage_path = PathBuf::from(channel.storage.clone());
|
|
||||||
|
|
||||||
if global.shared {
|
|
||||||
storage_path = storage_path.join("1");
|
|
||||||
|
|
||||||
channel.public = Path::new(&channel.public)
|
|
||||||
.join("1")
|
|
||||||
.to_string_lossy()
|
|
||||||
.to_string();
|
|
||||||
channel.playlists = Path::new(&channel.playlists)
|
|
||||||
.join("1")
|
|
||||||
.to_string_lossy()
|
|
||||||
.to_string();
|
|
||||||
channel.storage = storage_path.to_string_lossy().to_string();
|
|
||||||
};
|
|
||||||
|
|
||||||
if let Err(e) = copy_assets(&storage_path).await {
|
|
||||||
eprintln!("{e}");
|
|
||||||
};
|
|
||||||
|
|
||||||
handles::update_channel(pool, 1, channel).await.unwrap();
|
|
||||||
|
|
||||||
#[cfg(target_family = "unix")]
|
|
||||||
{
|
|
||||||
update_permissions().await;
|
|
||||||
}
|
|
||||||
|
|
||||||
println!("\nSet global settings done...");
|
|
||||||
} else if args.user_set {
|
|
||||||
global_user(&mut args);
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(username) = args.username {
|
|
||||||
error_code = 0;
|
|
||||||
|
|
||||||
let chl: Vec<i32> = channels.clone().iter().map(|c| c.id).collect();
|
|
||||||
|
|
||||||
let ff_user = User {
|
|
||||||
id: 0,
|
|
||||||
mail: Some(args.mail.unwrap()),
|
|
||||||
username: username.clone(),
|
|
||||||
password: args.password.unwrap(),
|
|
||||||
role_id: Some(1),
|
|
||||||
channel_ids: Some(chl.clone()),
|
|
||||||
token: None,
|
|
||||||
};
|
|
||||||
|
|
||||||
if let Err(e) = handles::insert_or_update_user(pool, ff_user).await {
|
|
||||||
eprintln!("{e}");
|
|
||||||
error_code = 1;
|
|
||||||
};
|
|
||||||
|
|
||||||
println!("Create/update global admin user \"{username}\" done...");
|
|
||||||
}
|
|
||||||
|
|
||||||
if ARGS.list_channels {
|
|
||||||
let chl = channels
|
|
||||||
.iter()
|
|
||||||
.map(|c| (c.id, c.name.clone()))
|
|
||||||
.collect::<Vec<(i32, String)>>();
|
|
||||||
|
|
||||||
println!(
|
|
||||||
"Available channels:\n{}",
|
|
||||||
chl.iter()
|
|
||||||
.map(|(i, t)| format!(" {i}: '{t}'"))
|
|
||||||
.collect::<Vec<String>>()
|
|
||||||
.join("\n")
|
|
||||||
);
|
|
||||||
|
|
||||||
error_code = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
if ARGS.dump_advanced {
|
|
||||||
if let Some(channels) = &ARGS.channels {
|
|
||||||
for id in channels {
|
|
||||||
match AdvancedConfig::dump(pool, *id).await {
|
|
||||||
Ok(_) => {
|
|
||||||
println!("Dump config to: advanced_{id}.toml");
|
|
||||||
error_code = 0;
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
eprintln!("Dump config: {e}");
|
|
||||||
error_code = 1;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
eprintln!("Channel ID(s) needed! Use `--channels 1 ...`");
|
|
||||||
error_code = 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if ARGS.dump_config {
|
|
||||||
if let Some(channels) = &ARGS.channels {
|
|
||||||
for id in channels {
|
|
||||||
match PlayoutConfig::dump(pool, *id).await {
|
|
||||||
Ok(_) => {
|
|
||||||
println!("Dump config to: ffplayout_{id}.toml");
|
|
||||||
error_code = 0;
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
eprintln!("Dump config: {e}");
|
|
||||||
error_code = 1;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
eprintln!("Channel ID(s) needed! Use `--channels 1 ...`");
|
|
||||||
error_code = 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(path) = &ARGS.import_advanced {
|
|
||||||
if let Some(channels) = &ARGS.channels {
|
|
||||||
for id in channels {
|
|
||||||
match AdvancedConfig::import(pool, *id, path).await {
|
|
||||||
Ok(_) => {
|
|
||||||
println!("Import config done...");
|
|
||||||
error_code = 0;
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
eprintln!("{e}");
|
|
||||||
error_code = 1;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
eprintln!("Channel ID(s) needed! Use `--channels 1 ...`");
|
|
||||||
error_code = 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(path) = &ARGS.import_config {
|
|
||||||
if let Some(channels) = &ARGS.channels {
|
|
||||||
for id in channels {
|
|
||||||
match PlayoutConfig::import(pool, *id, path).await {
|
|
||||||
Ok(_) => {
|
|
||||||
println!("Import config done...");
|
|
||||||
error_code = 0;
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
eprintln!("{e}");
|
|
||||||
error_code = 1;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
eprintln!("Channel ID(s) needed! Use `--channels 1 ...`");
|
|
||||||
error_code = 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if error_code > -1 {
|
|
||||||
Err(error_code)
|
|
||||||
} else {
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(target_family = "unix")]
|
|
||||||
async fn update_permissions() {
|
|
||||||
let db_path = Path::new(db_path().unwrap());
|
|
||||||
let uid = nix::unistd::Uid::current();
|
|
||||||
let parent_owner = db_path.parent().unwrap().metadata().unwrap().uid();
|
|
||||||
let user = nix::unistd::User::from_uid(parent_owner.into())
|
|
||||||
.unwrap_or_default()
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
if uid.is_root() && uid.to_string() != parent_owner.to_string() {
|
|
||||||
println!("Adjust DB permission...");
|
|
||||||
|
|
||||||
let db = fs::canonicalize(db_path).await.unwrap();
|
|
||||||
let shm = fs::canonicalize(db_path.with_extension("db-shm"))
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
let wal = fs::canonicalize(db_path.with_extension("db-wal"))
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
nix::unistd::chown(&db, Some(user.uid), Some(user.gid)).expect("Change DB owner");
|
|
||||||
|
|
||||||
if shm.is_file() {
|
|
||||||
nix::unistd::chown(&shm, Some(user.uid), Some(user.gid)).expect("Change DB-SHM owner");
|
|
||||||
}
|
|
||||||
|
|
||||||
if wal.is_file() {
|
|
||||||
nix::unistd::chown(&wal, Some(user.uid), Some(user.gid)).expect("Change DB-WAL owner");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,90 +0,0 @@
|
|||||||
use std::{
|
|
||||||
io,
|
|
||||||
path::PathBuf,
|
|
||||||
sync::{Arc, Mutex},
|
|
||||||
};
|
|
||||||
|
|
||||||
use log::*;
|
|
||||||
use sqlx::{Pool, Sqlite};
|
|
||||||
|
|
||||||
use super::logging::MailQueue;
|
|
||||||
use crate::db::{handles, models::Channel};
|
|
||||||
use crate::player::controller::{ChannelController, ChannelManager};
|
|
||||||
use crate::utils::{config::get_config, copy_assets, errors::ServiceError};
|
|
||||||
|
|
||||||
async fn map_global_admins(conn: &Pool<Sqlite>) -> Result<(), ServiceError> {
|
|
||||||
let channels = handles::select_related_channels(conn, None).await?;
|
|
||||||
let admins = handles::select_global_admins(conn).await?;
|
|
||||||
|
|
||||||
for admin in admins {
|
|
||||||
if let Err(e) =
|
|
||||||
handles::insert_user_channel(conn, admin.id, channels.iter().map(|c| c.id).collect())
|
|
||||||
.await
|
|
||||||
{
|
|
||||||
error!("Update global admin: {e}");
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a new channel end to end: persist it, seed its default presets and
/// configurations, start a manager and mail queue for it, and refresh the
/// global-admin channel mappings.
///
/// Returns the stored channel (with its DB-assigned id) on success.
pub async fn create_channel(
    conn: &Pool<Sqlite>,
    controllers: Arc<Mutex<ChannelController>>,
    queue: Arc<Mutex<Vec<Arc<Mutex<MailQueue>>>>>,
    target_channel: Channel,
) -> Result<Channel, ServiceError> {
    // Insert first so the generated channel id is available for the steps below.
    let channel = handles::insert_channel(conn, target_channel).await?;

    handles::new_channel_presets(conn, channel.id).await?;
    handles::update_channel(conn, channel.id, channel.clone()).await?;

    // Default ffmpeg output parameters (HLS) for the new channel's configuration.
    let output_param = "-c:v libx264 -crf 23 -x264-params keyint=50:min-keyint=25:scenecut=-1 -maxrate 1300k -bufsize 2600k -preset faster -tune zerolatency -profile:v Main -level 3.1 -c:a aac -ar 44100 -b:a 128k -flags +cgop -f hls -hls_time 6 -hls_list_size 600 -hls_flags append_list+delete_segments+omit_endlist -hls_segment_filename live/stream-%d.ts live/stream.m3u8".to_string();

    handles::insert_advanced_configuration(conn, channel.id).await?;
    handles::insert_configuration(conn, channel.id, output_param).await?;

    let config = get_config(conn, channel.id).await?;
    let m_queue = Arc::new(Mutex::new(MailQueue::new(channel.id, config.mail.clone())));
    let manager = ChannelManager::new(Some(conn.clone()), channel.clone(), config.clone());

    // Best effort: a failed asset copy is logged but does not abort creation.
    if let Err(e) = copy_assets(&PathBuf::from(&config.storage.path)).await {
        error!("{e}");
    };

    // A poisoned controller lock is surfaced as an io::Error -> ServiceError.
    controllers
        .lock()
        .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?
        .add(manager);

    if let Ok(mut mqs) = queue.lock() {
        mqs.push(m_queue.clone());
    }

    map_global_admins(conn).await?;

    Ok(channel)
}
|
|
||||||
|
|
||||||
/// Delete channel `id`: remove it from the database, drop its controller and
/// mail queue, and refresh the global-admin channel mappings.
pub async fn delete_channel(
    conn: &Pool<Sqlite>,
    id: i32,
    controllers: Arc<Mutex<ChannelController>>,
    queue: Arc<Mutex<Vec<Arc<Mutex<MailQueue>>>>>,
) -> Result<(), ServiceError> {
    // Look the channel up first so a missing id fails before any removal.
    let channel = handles::select_channel(conn, &id).await?;
    handles::delete_channel(conn, &channel.id).await?;

    // A poisoned controller lock is surfaced as an io::Error -> ServiceError.
    controllers
        .lock()
        .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?
        .remove(id);

    // Drop this channel's mail queue entry; other queues stay untouched.
    if let Ok(mut mqs) = queue.lock() {
        mqs.retain(|q| q.lock().unwrap().id != id);
    }

    map_global_admins(conn).await?;

    Ok(())
}
|
|
@ -1,972 +0,0 @@
|
|||||||
use std::{
|
|
||||||
fmt,
|
|
||||||
path::{Path, PathBuf},
|
|
||||||
str::FromStr,
|
|
||||||
};
|
|
||||||
|
|
||||||
use chrono::NaiveTime;
|
|
||||||
use flexi_logger::Level;
|
|
||||||
use regex::Regex;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use shlex::split;
|
|
||||||
use sqlx::{Pool, Sqlite};
|
|
||||||
use tokio::{fs, io::AsyncReadExt};
|
|
||||||
use ts_rs::TS;
|
|
||||||
|
|
||||||
use crate::db::{handles, models};
|
|
||||||
use crate::utils::{files::norm_abs_path, gen_tcp_socket, time_to_sec};
|
|
||||||
use crate::vec_strings;
|
|
||||||
use crate::AdvancedConfig;
|
|
||||||
use crate::ARGS;
|
|
||||||
|
|
||||||
use super::errors::ServiceError;
|
|
||||||
|
|
||||||
pub const DUMMY_LEN: f64 = 60.0;
|
|
||||||
pub const IMAGE_FORMAT: [&str; 21] = [
|
|
||||||
"bmp", "dds", "dpx", "exr", "gif", "hdr", "j2k", "jpg", "jpeg", "pcx", "pfm", "pgm", "phm",
|
|
||||||
"png", "psd", "ppm", "sgi", "svg", "tga", "tif", "webp",
|
|
||||||
];
|
|
||||||
|
|
||||||
// Some well known errors can be safely ignore
|
|
||||||
pub const FFMPEG_IGNORE_ERRORS: [&str; 13] = [
|
|
||||||
"ac-tex damaged",
|
|
||||||
"codec s302m, is muxed as a private data stream",
|
|
||||||
"corrupt decoded frame in stream",
|
|
||||||
"corrupt input packet in stream",
|
|
||||||
"end mismatch left",
|
|
||||||
"Invalid mb type in I-frame at",
|
|
||||||
"Packet corrupt",
|
|
||||||
"Referenced QT chapter track not found",
|
|
||||||
"skipped MB in I-frame at",
|
|
||||||
"Thread message queue blocking",
|
|
||||||
"timestamp discontinuity",
|
|
||||||
"Warning MVs not available",
|
|
||||||
"frame size not set",
|
|
||||||
];
|
|
||||||
|
|
||||||
pub const FFMPEG_UNRECOVERABLE_ERRORS: [&str; 6] = [
|
|
||||||
"Address already in use",
|
|
||||||
"Invalid argument",
|
|
||||||
"Numerical result",
|
|
||||||
"Error initializing complex filters",
|
|
||||||
"Error while decoding stream #0:0: Invalid data found when processing input",
|
|
||||||
"Unrecognized option",
|
|
||||||
];
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Deserialize, Serialize, TS)]
|
|
||||||
#[ts(export, export_to = "playout_config.d.ts")]
|
|
||||||
#[serde(rename_all = "lowercase")]
|
|
||||||
pub enum OutputMode {
|
|
||||||
Desktop,
|
|
||||||
HLS,
|
|
||||||
Null,
|
|
||||||
Stream,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl OutputMode {
|
|
||||||
fn new(s: &str) -> Self {
|
|
||||||
match s {
|
|
||||||
"desktop" => Self::Desktop,
|
|
||||||
"null" => Self::Null,
|
|
||||||
"stream" => Self::Stream,
|
|
||||||
_ => Self::HLS,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for OutputMode {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self::HLS
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromStr for OutputMode {
|
|
||||||
type Err = String;
|
|
||||||
|
|
||||||
fn from_str(input: &str) -> Result<Self, Self::Err> {
|
|
||||||
match input {
|
|
||||||
"desktop" => Ok(Self::Desktop),
|
|
||||||
"hls" => Ok(Self::HLS),
|
|
||||||
"null" => Ok(Self::Null),
|
|
||||||
"stream" => Ok(Self::Stream),
|
|
||||||
_ => Err("Use 'desktop', 'hls', 'null' or 'stream'".to_string()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for OutputMode {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
match *self {
|
|
||||||
OutputMode::Desktop => write!(f, "desktop"),
|
|
||||||
OutputMode::HLS => write!(f, "hls"),
|
|
||||||
OutputMode::Null => write!(f, "null"),
|
|
||||||
OutputMode::Stream => write!(f, "stream"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Serialize, Deserialize, Eq, PartialEq, TS)]
|
|
||||||
#[ts(export, export_to = "playout_config.d.ts")]
|
|
||||||
#[serde(rename_all = "lowercase")]
|
|
||||||
pub enum ProcessMode {
|
|
||||||
Folder,
|
|
||||||
#[default]
|
|
||||||
Playlist,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ProcessMode {
|
|
||||||
fn new(s: &str) -> Self {
|
|
||||||
match s {
|
|
||||||
"folder" => Self::Folder,
|
|
||||||
_ => Self::Playlist,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for ProcessMode {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
match *self {
|
|
||||||
ProcessMode::Folder => write!(f, "folder"),
|
|
||||||
ProcessMode::Playlist => write!(f, "playlist"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromStr for ProcessMode {
|
|
||||||
type Err = String;
|
|
||||||
|
|
||||||
fn from_str(input: &str) -> Result<Self, Self::Err> {
|
|
||||||
match input {
|
|
||||||
"folder" => Ok(Self::Folder),
|
|
||||||
"playlist" => Ok(Self::Playlist),
|
|
||||||
_ => Err("Use 'folder' or 'playlist'".to_string()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default, Deserialize, Serialize, TS)]
|
|
||||||
pub struct Template {
|
|
||||||
pub sources: Vec<Source>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default, Deserialize, Serialize, TS)]
|
|
||||||
pub struct Source {
|
|
||||||
#[ts(type = "string")]
|
|
||||||
pub start: NaiveTime,
|
|
||||||
#[ts(type = "string")]
|
|
||||||
pub duration: NaiveTime,
|
|
||||||
pub shuffle: bool,
|
|
||||||
pub paths: Vec<PathBuf>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Channel Config
|
|
||||||
///
|
|
||||||
/// This we init ones, when ffplayout is starting and use them globally in the hole program.
|
|
||||||
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
|
|
||||||
#[ts(export, export_to = "playout_config.d.ts")]
|
|
||||||
pub struct PlayoutConfig {
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub channel: Channel,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub advanced: AdvancedConfig,
|
|
||||||
pub general: General,
|
|
||||||
pub mail: Mail,
|
|
||||||
pub logging: Logging,
|
|
||||||
pub processing: Processing,
|
|
||||||
pub ingest: Ingest,
|
|
||||||
pub playlist: Playlist,
|
|
||||||
pub storage: Storage,
|
|
||||||
pub text: Text,
|
|
||||||
pub task: Task,
|
|
||||||
#[serde(alias = "out")]
|
|
||||||
pub output: Output,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
|
|
||||||
pub struct Channel {
|
|
||||||
pub logs: PathBuf,
|
|
||||||
pub public: PathBuf,
|
|
||||||
pub playlists: PathBuf,
|
|
||||||
pub storage: PathBuf,
|
|
||||||
pub shared: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Channel {
|
|
||||||
pub fn new(config: &models::GlobalSettings, channel: models::Channel) -> Self {
|
|
||||||
Self {
|
|
||||||
logs: PathBuf::from(config.logs.clone()),
|
|
||||||
public: PathBuf::from(channel.public.clone()),
|
|
||||||
playlists: PathBuf::from(channel.playlists.clone()),
|
|
||||||
storage: PathBuf::from(channel.storage.clone()),
|
|
||||||
shared: config.shared,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
|
|
||||||
#[ts(export, export_to = "playout_config.d.ts")]
|
|
||||||
pub struct General {
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub id: i32,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub channel_id: i32,
|
|
||||||
pub stop_threshold: f64,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub generate: Option<Vec<String>>,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub ffmpeg_filters: Vec<String>,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub ffmpeg_libs: Vec<String>,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub template: Option<Template>,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub skip_validation: bool,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub validate: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl General {
|
|
||||||
fn new(config: &models::Configuration) -> Self {
|
|
||||||
Self {
|
|
||||||
id: config.id,
|
|
||||||
channel_id: config.channel_id,
|
|
||||||
stop_threshold: config.general_stop_threshold,
|
|
||||||
generate: None,
|
|
||||||
ffmpeg_filters: vec![],
|
|
||||||
ffmpeg_libs: vec![],
|
|
||||||
template: None,
|
|
||||||
skip_validation: false,
|
|
||||||
validate: false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Deserialize, Serialize, TS)]
|
|
||||||
#[ts(export, export_to = "playout_config.d.ts")]
|
|
||||||
pub struct Mail {
|
|
||||||
#[serde(skip_deserializing)]
|
|
||||||
pub show: bool,
|
|
||||||
pub subject: String,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub smtp_server: String,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub starttls: bool,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub sender_addr: String,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub sender_pass: String,
|
|
||||||
pub recipient: String,
|
|
||||||
#[ts(type = "string")]
|
|
||||||
pub mail_level: Level,
|
|
||||||
pub interval: i64,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Mail {
|
|
||||||
fn new(global: &models::GlobalSettings, config: &models::Configuration) -> Self {
|
|
||||||
Self {
|
|
||||||
show: !global.mail_password.is_empty() && global.mail_smtp != "mail.example.org",
|
|
||||||
subject: config.mail_subject.clone(),
|
|
||||||
smtp_server: global.mail_smtp.clone(),
|
|
||||||
starttls: global.mail_starttls,
|
|
||||||
sender_addr: global.mail_user.clone(),
|
|
||||||
sender_pass: global.mail_password.clone(),
|
|
||||||
recipient: config.mail_recipient.clone(),
|
|
||||||
mail_level: string_to_log_level(config.mail_level.clone()),
|
|
||||||
interval: config.mail_interval,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for Mail {
|
|
||||||
fn default() -> Self {
|
|
||||||
Mail {
|
|
||||||
show: false,
|
|
||||||
subject: String::default(),
|
|
||||||
smtp_server: String::default(),
|
|
||||||
starttls: bool::default(),
|
|
||||||
sender_addr: String::default(),
|
|
||||||
sender_pass: String::default(),
|
|
||||||
recipient: String::default(),
|
|
||||||
mail_level: Level::Debug,
|
|
||||||
interval: i64::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
|
|
||||||
#[ts(export, export_to = "playout_config.d.ts")]
|
|
||||||
pub struct Logging {
|
|
||||||
pub ffmpeg_level: String,
|
|
||||||
pub ingest_level: String,
|
|
||||||
pub detect_silence: bool,
|
|
||||||
pub ignore_lines: Vec<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Logging {
|
|
||||||
fn new(config: &models::Configuration) -> Self {
|
|
||||||
Self {
|
|
||||||
ffmpeg_level: config.logging_ffmpeg_level.clone(),
|
|
||||||
ingest_level: config.logging_ingest_level.clone(),
|
|
||||||
detect_silence: config.logging_detect_silence,
|
|
||||||
ignore_lines: config
|
|
||||||
.logging_ignore
|
|
||||||
.split(';')
|
|
||||||
.map(|s| s.to_string())
|
|
||||||
.collect(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
|
|
||||||
#[ts(export, export_to = "playout_config.d.ts")]
|
|
||||||
pub struct Processing {
|
|
||||||
pub mode: ProcessMode,
|
|
||||||
pub audio_only: bool,
|
|
||||||
pub copy_audio: bool,
|
|
||||||
pub copy_video: bool,
|
|
||||||
pub width: i64,
|
|
||||||
pub height: i64,
|
|
||||||
pub aspect: f64,
|
|
||||||
pub fps: f64,
|
|
||||||
pub add_logo: bool,
|
|
||||||
pub logo: String,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub logo_path: String,
|
|
||||||
pub logo_scale: String,
|
|
||||||
pub logo_opacity: f64,
|
|
||||||
pub logo_position: String,
|
|
||||||
pub audio_tracks: i32,
|
|
||||||
#[serde(default = "default_track_index")]
|
|
||||||
pub audio_track_index: i32,
|
|
||||||
pub audio_channels: u8,
|
|
||||||
pub volume: f64,
|
|
||||||
pub custom_filter: String,
|
|
||||||
#[serde(default)]
|
|
||||||
pub vtt_enable: bool,
|
|
||||||
#[serde(default)]
|
|
||||||
pub vtt_dummy: Option<String>,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub cmd: Option<Vec<String>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Processing {
|
|
||||||
fn new(config: &models::Configuration) -> Self {
|
|
||||||
Self {
|
|
||||||
mode: ProcessMode::new(&config.processing_mode.clone()),
|
|
||||||
audio_only: config.processing_audio_only,
|
|
||||||
audio_track_index: config.processing_audio_track_index,
|
|
||||||
copy_audio: config.processing_copy_audio,
|
|
||||||
copy_video: config.processing_copy_video,
|
|
||||||
width: config.processing_width,
|
|
||||||
height: config.processing_height,
|
|
||||||
aspect: config.processing_aspect,
|
|
||||||
fps: config.processing_fps,
|
|
||||||
add_logo: config.processing_add_logo,
|
|
||||||
logo: config.processing_logo.clone(),
|
|
||||||
logo_path: config.processing_logo.clone(),
|
|
||||||
logo_scale: config.processing_logo_scale.clone(),
|
|
||||||
logo_opacity: config.processing_logo_opacity,
|
|
||||||
logo_position: config.processing_logo_position.clone(),
|
|
||||||
audio_tracks: config.processing_audio_tracks,
|
|
||||||
audio_channels: config.processing_audio_channels,
|
|
||||||
volume: config.processing_volume,
|
|
||||||
custom_filter: config.processing_filter.clone(),
|
|
||||||
vtt_enable: config.processing_vtt_enable,
|
|
||||||
vtt_dummy: config.processing_vtt_dummy.clone(),
|
|
||||||
cmd: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
|
|
||||||
#[ts(export, export_to = "playout_config.d.ts")]
|
|
||||||
pub struct Ingest {
|
|
||||||
pub enable: bool,
|
|
||||||
pub input_param: String,
|
|
||||||
pub custom_filter: String,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub input_cmd: Option<Vec<String>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Ingest {
|
|
||||||
fn new(config: &models::Configuration) -> Self {
|
|
||||||
Self {
|
|
||||||
enable: config.ingest_enable,
|
|
||||||
input_param: config.ingest_param.clone(),
|
|
||||||
custom_filter: config.ingest_filter.clone(),
|
|
||||||
input_cmd: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
|
|
||||||
#[ts(export, export_to = "playout_config.d.ts")]
|
|
||||||
pub struct Playlist {
|
|
||||||
pub day_start: String,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub start_sec: Option<f64>,
|
|
||||||
pub length: String,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub length_sec: Option<f64>,
|
|
||||||
pub infinit: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Playlist {
|
|
||||||
fn new(config: &models::Configuration) -> Self {
|
|
||||||
Self {
|
|
||||||
day_start: config.playlist_day_start.clone(),
|
|
||||||
start_sec: None,
|
|
||||||
length: config.playlist_length.clone(),
|
|
||||||
length_sec: None,
|
|
||||||
infinit: config.playlist_infinit,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
|
|
||||||
#[ts(export, export_to = "playout_config.d.ts")]
|
|
||||||
pub struct Storage {
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub path: PathBuf,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub paths: Vec<PathBuf>,
|
|
||||||
pub filler: String,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub filler_path: PathBuf,
|
|
||||||
pub extensions: Vec<String>,
|
|
||||||
pub shuffle: bool,
|
|
||||||
#[serde(skip_deserializing)]
|
|
||||||
pub shared_storage: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Storage {
|
|
||||||
fn new(config: &models::Configuration, path: PathBuf, shared_storage: bool) -> Self {
|
|
||||||
Self {
|
|
||||||
path,
|
|
||||||
paths: vec![],
|
|
||||||
filler: config.storage_filler.clone(),
|
|
||||||
filler_path: PathBuf::from(config.storage_filler.clone()),
|
|
||||||
extensions: config
|
|
||||||
.storage_extensions
|
|
||||||
.split(';')
|
|
||||||
.map(|s| s.to_string())
|
|
||||||
.collect(),
|
|
||||||
shuffle: config.storage_shuffle,
|
|
||||||
shared_storage,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
|
|
||||||
#[ts(export, export_to = "playout_config.d.ts")]
|
|
||||||
pub struct Text {
|
|
||||||
pub add_text: bool,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub node_pos: Option<usize>,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub zmq_stream_socket: Option<String>,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub zmq_server_socket: Option<String>,
|
|
||||||
#[serde(alias = "fontfile")]
|
|
||||||
pub font: String,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub font_path: String,
|
|
||||||
pub text_from_filename: bool,
|
|
||||||
pub style: String,
|
|
||||||
pub regex: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Text {
|
|
||||||
fn new(config: &models::Configuration) -> Self {
|
|
||||||
Self {
|
|
||||||
add_text: config.text_add,
|
|
||||||
node_pos: None,
|
|
||||||
zmq_stream_socket: None,
|
|
||||||
zmq_server_socket: None,
|
|
||||||
font: config.text_font.clone(),
|
|
||||||
font_path: config.text_font.clone(),
|
|
||||||
text_from_filename: config.text_from_filename,
|
|
||||||
style: config.text_style.clone(),
|
|
||||||
regex: config.text_regex.clone(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
|
|
||||||
#[ts(export, export_to = "playout_config.d.ts")]
|
|
||||||
pub struct Task {
|
|
||||||
pub enable: bool,
|
|
||||||
pub path: PathBuf,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Task {
|
|
||||||
fn new(config: &models::Configuration) -> Self {
|
|
||||||
Self {
|
|
||||||
enable: config.task_enable,
|
|
||||||
path: PathBuf::from(config.task_path.clone()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Deserialize, Serialize, TS)]
|
|
||||||
#[ts(export, export_to = "playout_config.d.ts")]
|
|
||||||
pub struct Output {
|
|
||||||
pub mode: OutputMode,
|
|
||||||
pub output_param: String,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub output_count: usize,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub output_filter: Option<String>,
|
|
||||||
#[ts(skip)]
|
|
||||||
#[serde(skip_serializing, skip_deserializing)]
|
|
||||||
pub output_cmd: Option<Vec<String>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Output {
|
|
||||||
fn new(config: &models::Configuration) -> Self {
|
|
||||||
Self {
|
|
||||||
mode: OutputMode::new(&config.output_mode),
|
|
||||||
output_param: config.output_param.clone(),
|
|
||||||
output_count: 0,
|
|
||||||
output_filter: None,
|
|
||||||
output_cmd: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn string_to_log_level(l: String) -> Level {
|
|
||||||
match l.to_lowercase().as_str() {
|
|
||||||
"error" => Level::Error,
|
|
||||||
"info" => Level::Info,
|
|
||||||
"trace" => Level::Trace,
|
|
||||||
"warning" => Level::Warn,
|
|
||||||
_ => Level::Debug,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn string_to_processing_mode(l: String) -> ProcessMode {
|
|
||||||
match l.to_lowercase().as_str() {
|
|
||||||
"playlist" => ProcessMode::Playlist,
|
|
||||||
"folder" => ProcessMode::Folder,
|
|
||||||
_ => ProcessMode::Playlist,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn string_to_output_mode(l: String) -> OutputMode {
|
|
||||||
match l.to_lowercase().as_str() {
|
|
||||||
"desktop" => OutputMode::Desktop,
|
|
||||||
"hls" => OutputMode::HLS,
|
|
||||||
"null" => OutputMode::Null,
|
|
||||||
"stream" => OutputMode::Stream,
|
|
||||||
_ => OutputMode::HLS,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_track_index() -> i32 {
|
|
||||||
-1
|
|
||||||
}
|
|
||||||
|
|
||||||
// fn default_tracks() -> i32 {
|
|
||||||
// 1
|
|
||||||
// }
|
|
||||||
|
|
||||||
// fn default_channels() -> u8 {
|
|
||||||
// 2
|
|
||||||
// }
|
|
||||||
|
|
||||||
impl PlayoutConfig {
|
|
||||||
pub async fn new(pool: &Pool<Sqlite>, channel_id: i32) -> Result<Self, ServiceError> {
|
|
||||||
let global = handles::select_global(pool).await?;
|
|
||||||
let channel = handles::select_channel(pool, &channel_id).await?;
|
|
||||||
let config = handles::select_configuration(pool, channel_id).await?;
|
|
||||||
let adv_config = handles::select_advanced_configuration(pool, channel_id).await?;
|
|
||||||
|
|
||||||
let channel = Channel::new(&global, channel);
|
|
||||||
let advanced = AdvancedConfig::new(adv_config);
|
|
||||||
let general = General::new(&config);
|
|
||||||
let mail = Mail::new(&global, &config);
|
|
||||||
let logging = Logging::new(&config);
|
|
||||||
let mut processing = Processing::new(&config);
|
|
||||||
let mut ingest = Ingest::new(&config);
|
|
||||||
let mut playlist = Playlist::new(&config);
|
|
||||||
let mut text = Text::new(&config);
|
|
||||||
let task = Task::new(&config);
|
|
||||||
let mut output = Output::new(&config);
|
|
||||||
|
|
||||||
if !channel.storage.is_dir() {
|
|
||||||
tokio::fs::create_dir_all(&channel.storage)
|
|
||||||
.await
|
|
||||||
.unwrap_or_else(|_| panic!("Can't create storage folder: {:#?}", channel.storage));
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut storage = Storage::new(&config, channel.storage.clone(), channel.shared);
|
|
||||||
|
|
||||||
if !channel.playlists.is_dir() {
|
|
||||||
tokio::fs::create_dir_all(&channel.playlists).await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
if !channel.logs.is_dir() {
|
|
||||||
tokio::fs::create_dir_all(&channel.logs).await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let (filler_path, _, filler) = norm_abs_path(&channel.storage, &config.storage_filler)?;
|
|
||||||
|
|
||||||
storage.filler = filler;
|
|
||||||
storage.filler_path = filler_path;
|
|
||||||
|
|
||||||
playlist.start_sec = Some(time_to_sec(&playlist.day_start));
|
|
||||||
|
|
||||||
if playlist.length.contains(':') {
|
|
||||||
playlist.length_sec = Some(time_to_sec(&playlist.length));
|
|
||||||
} else {
|
|
||||||
playlist.length_sec = Some(86400.0);
|
|
||||||
}
|
|
||||||
|
|
||||||
let (logo_path, _, logo) = norm_abs_path(&channel.storage, &processing.logo)?;
|
|
||||||
|
|
||||||
if processing.add_logo && !logo_path.is_file() {
|
|
||||||
processing.add_logo = false;
|
|
||||||
}
|
|
||||||
|
|
||||||
processing.logo = logo;
|
|
||||||
processing.logo_path = logo_path.to_string_lossy().to_string();
|
|
||||||
|
|
||||||
if processing.audio_tracks < 1 {
|
|
||||||
processing.audio_tracks = 1
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut process_cmd = vec_strings![];
|
|
||||||
|
|
||||||
if processing.audio_only {
|
|
||||||
process_cmd.append(&mut vec_strings!["-vn"]);
|
|
||||||
} else if processing.copy_video {
|
|
||||||
process_cmd.append(&mut vec_strings!["-c:v", "copy"]);
|
|
||||||
} else if let Some(decoder_cmd) = &advanced.decoder.output_cmd {
|
|
||||||
process_cmd.append(&mut decoder_cmd.clone());
|
|
||||||
} else {
|
|
||||||
let bitrate = format!("{}k", processing.width * processing.height / 16);
|
|
||||||
let buff_size = format!("{}k", (processing.width * processing.height / 16) / 2);
|
|
||||||
|
|
||||||
process_cmd.append(&mut vec_strings![
|
|
||||||
"-pix_fmt",
|
|
||||||
"yuv420p",
|
|
||||||
"-r",
|
|
||||||
&processing.fps,
|
|
||||||
"-c:v",
|
|
||||||
"mpeg2video",
|
|
||||||
"-g",
|
|
||||||
"1",
|
|
||||||
"-b:v",
|
|
||||||
&bitrate,
|
|
||||||
"-minrate",
|
|
||||||
&bitrate,
|
|
||||||
"-maxrate",
|
|
||||||
&bitrate,
|
|
||||||
"-bufsize",
|
|
||||||
&buff_size,
|
|
||||||
"-mpegts_flags",
|
|
||||||
"initial_discontinuity"
|
|
||||||
]);
|
|
||||||
}
|
|
||||||
|
|
||||||
if processing.copy_audio {
|
|
||||||
process_cmd.append(&mut vec_strings!["-c:a", "copy"]);
|
|
||||||
} else if advanced.decoder.output_cmd.is_none() {
|
|
||||||
process_cmd.append(&mut pre_audio_codec(
|
|
||||||
&processing.custom_filter,
|
|
||||||
&ingest.custom_filter,
|
|
||||||
processing.audio_channels,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
process_cmd.append(&mut vec_strings!["-f", "mpegts", "-"]);
|
|
||||||
|
|
||||||
processing.cmd = Some(process_cmd);
|
|
||||||
|
|
||||||
ingest.input_cmd = split(ingest.input_param.as_str());
|
|
||||||
|
|
||||||
output.output_count = 1;
|
|
||||||
output.output_filter = None;
|
|
||||||
|
|
||||||
if output.mode == OutputMode::Null {
|
|
||||||
output.output_cmd = Some(vec_strings!["-f", "null", "-"]);
|
|
||||||
} else if let Some(mut cmd) = split(output.output_param.as_str()) {
|
|
||||||
// get output count according to the var_stream_map value, or by counting output parameters
|
|
||||||
if let Some(i) = cmd.clone().iter().position(|m| m == "-var_stream_map") {
|
|
||||||
output.output_count = cmd[i + 1].split_whitespace().count();
|
|
||||||
} else {
|
|
||||||
output.output_count = cmd
|
|
||||||
.iter()
|
|
||||||
.enumerate()
|
|
||||||
.filter(|(i, p)| i > &0 && !p.starts_with('-') && !cmd[i - 1].starts_with('-'))
|
|
||||||
.count();
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(i) = cmd.clone().iter().position(|r| r == "-filter_complex") {
|
|
||||||
output.output_filter = Some(cmd[i + 1].clone());
|
|
||||||
cmd.remove(i);
|
|
||||||
cmd.remove(i);
|
|
||||||
}
|
|
||||||
|
|
||||||
let is_tee_muxer = cmd.contains(&"tee".to_string());
|
|
||||||
|
|
||||||
for item in cmd.iter_mut() {
|
|
||||||
if item.ends_with(".ts") || (item.ends_with(".m3u8") && item != "master.m3u8") {
|
|
||||||
if is_tee_muxer {
|
|
||||||
// Processes the `item` string to replace `.ts` and `.m3u8` filenames with their absolute paths.
|
|
||||||
// Ensures that the corresponding directories exist.
|
|
||||||
//
|
|
||||||
// - Uses regular expressions to identify `.ts` and `.m3u8` filenames within the `item` string.
|
|
||||||
// - For each identified filename, normalizes its path and checks if the parent directory exists.
|
|
||||||
// - Creates the parent directory if it does not exist.
|
|
||||||
// - Replaces the original filename in the `item` string with the normalized absolute path.
|
|
||||||
let re_ts = Regex::new(r"filename=(\S+?\.ts)").unwrap();
|
|
||||||
let re_m3 = Regex::new(r"\](\S+?\.m3u8)").unwrap();
|
|
||||||
|
|
||||||
for s in item.clone().split('|') {
|
|
||||||
if let Some(ts) = re_ts.captures(s).and_then(|p| p.get(1)) {
|
|
||||||
let (segment_path, _, _) =
|
|
||||||
norm_abs_path(&channel.public, ts.as_str())?;
|
|
||||||
let parent = segment_path.parent().ok_or("HLS parent path")?;
|
|
||||||
|
|
||||||
if !parent.is_dir() {
|
|
||||||
fs::create_dir_all(parent).await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
item.clone_from(
|
|
||||||
&item.replace(ts.as_str(), &segment_path.to_string_lossy()),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(m3) = re_m3.captures(s).and_then(|p| p.get(1)) {
|
|
||||||
let (m3u8_path, _, _) =
|
|
||||||
norm_abs_path(&channel.public, m3.as_str())?;
|
|
||||||
let parent = m3u8_path.parent().ok_or("HLS parent path")?;
|
|
||||||
|
|
||||||
if !parent.is_dir() {
|
|
||||||
fs::create_dir_all(parent).await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
item.clone_from(
|
|
||||||
&item.replace(m3.as_str(), &m3u8_path.to_string_lossy()),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else if let Ok((public, _, _)) = norm_abs_path(&channel.public, item) {
|
|
||||||
let parent = public.parent().ok_or("HLS parent path")?;
|
|
||||||
|
|
||||||
if !parent.is_dir() {
|
|
||||||
fs::create_dir_all(parent).await?;
|
|
||||||
}
|
|
||||||
item.clone_from(&public.to_string_lossy().to_string());
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
output.output_cmd = Some(cmd);
|
|
||||||
}
|
|
||||||
|
|
||||||
// when text overlay without text_from_filename is on, turn also the RPC server on,
|
|
||||||
// to get text messages from it
|
|
||||||
if text.add_text && !text.text_from_filename {
|
|
||||||
text.zmq_stream_socket = gen_tcp_socket(String::new());
|
|
||||||
text.zmq_server_socket =
|
|
||||||
gen_tcp_socket(text.zmq_stream_socket.clone().unwrap_or_default());
|
|
||||||
text.node_pos = Some(2);
|
|
||||||
} else {
|
|
||||||
text.zmq_stream_socket = None;
|
|
||||||
text.zmq_server_socket = None;
|
|
||||||
text.node_pos = None;
|
|
||||||
}
|
|
||||||
|
|
||||||
let (font_path, _, font) = norm_abs_path(&channel.storage, &text.font)?;
|
|
||||||
text.font = font;
|
|
||||||
text.font_path = font_path.to_string_lossy().to_string();
|
|
||||||
|
|
||||||
Ok(Self {
|
|
||||||
channel,
|
|
||||||
advanced,
|
|
||||||
general,
|
|
||||||
mail,
|
|
||||||
logging,
|
|
||||||
processing,
|
|
||||||
ingest,
|
|
||||||
playlist,
|
|
||||||
storage,
|
|
||||||
text,
|
|
||||||
task,
|
|
||||||
output,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn dump(pool: &Pool<Sqlite>, id: i32) -> Result<(), ServiceError> {
|
|
||||||
let config = Self::new(pool, id).await?;
|
|
||||||
|
|
||||||
let toml_string = toml_edit::ser::to_string_pretty(&config)?;
|
|
||||||
tokio::fs::write(&format!("ffplayout_{id}.toml"), toml_string).await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn import(pool: &Pool<Sqlite>, id: i32, path: &Path) -> Result<(), ServiceError> {
|
|
||||||
if path.is_file() {
|
|
||||||
let mut file = tokio::fs::File::open(path).await?;
|
|
||||||
let mut contents = String::new();
|
|
||||||
file.read_to_string(&mut contents).await?;
|
|
||||||
|
|
||||||
let config: PlayoutConfig = toml_edit::de::from_str(&contents).unwrap();
|
|
||||||
|
|
||||||
handles::update_configuration(pool, id, config).await?;
|
|
||||||
} else {
|
|
||||||
return Err(ServiceError::BadRequest("Path not exists!".to_string()));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// impl Default for PlayoutConfig {
|
|
||||||
// fn default() -> Self {
|
|
||||||
// Self::new(1)
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
|
|
||||||
/// When custom_filter contains loudnorm filter use a different audio encoder,
|
|
||||||
/// s302m has higher quality, but is experimental
|
|
||||||
/// and works not well together with the loudnorm filter.
|
|
||||||
fn pre_audio_codec(proc_filter: &str, ingest_filter: &str, channel_count: u8) -> Vec<String> {
|
|
||||||
let mut codec = vec_strings![
|
|
||||||
"-c:a",
|
|
||||||
"s302m",
|
|
||||||
"-strict",
|
|
||||||
"-2",
|
|
||||||
"-sample_fmt",
|
|
||||||
"s16",
|
|
||||||
"-ar",
|
|
||||||
"48000",
|
|
||||||
"-ac",
|
|
||||||
channel_count
|
|
||||||
];
|
|
||||||
|
|
||||||
if proc_filter.contains("loudnorm") || ingest_filter.contains("loudnorm") {
|
|
||||||
codec = vec_strings![
|
|
||||||
"-c:a",
|
|
||||||
"mp2",
|
|
||||||
"-b:a",
|
|
||||||
"384k",
|
|
||||||
"-ar",
|
|
||||||
"48000",
|
|
||||||
"-ac",
|
|
||||||
channel_count
|
|
||||||
];
|
|
||||||
}
|
|
||||||
|
|
||||||
codec
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Read command line arguments, and override the config with them.
///
/// Loads the channel's `PlayoutConfig` from the database, then applies the
/// CLI overrides: generate/validate flags, an optional JSON template file,
/// storage/playlist paths, day start, output mode, volume and mail settings.
pub async fn get_config(
    pool: &Pool<Sqlite>,
    channel_id: i32,
) -> Result<PlayoutConfig, ServiceError> {
    let mut config = PlayoutConfig::new(pool, channel_id).await?;
    let args = ARGS.clone();

    config.general.generate = args.generate;
    config.general.validate = args.validate;
    config.general.skip_validation = args.skip_validation;

    if let Some(template_file) = args.template {
        // Read the JSON playlist template from disk (read-only).
        let mut f = fs::File::options()
            .read(true)
            .write(false)
            .open(template_file)
            .await?;
        let mut buffer = Vec::new();
        f.read_to_end(&mut buffer).await?;

        let mut template: Template = serde_json::from_slice(&buffer)?;

        // Template sources must be ordered by start time.
        template.sources.sort_by(|d1, d2| d1.start.cmp(&d2.start));

        config.general.template = Some(template);
    }

    if let Some(paths) = args.paths {
        config.storage.paths = paths;
    }

    if let Some(playlist) = args.playlists {
        config.channel.playlists = PathBuf::from(&playlist);
    }

    if let Some(folder) = args.folder {
        // A folder argument implies folder playout mode.
        config.channel.storage = folder;
        config.processing.mode = ProcessMode::Folder;
    }

    if let Some(start) = args.start {
        config.playlist.day_start.clone_from(&start);
        config.playlist.start_sec = Some(time_to_sec(&start));
    }

    if let Some(output) = args.output {
        config.output.mode = output;

        if config.output.mode == OutputMode::Null {
            // Null output discards everything; force a single null-muxer target.
            config.output.output_count = 1;
            config.output.output_filter = None;
            config.output.output_cmd = Some(vec_strings!["-f", "null", "-"]);
        }
    }

    if let Some(volume) = args.volume {
        config.processing.volume = volume;
    }

    if let Some(mail_smtp) = args.mail_smtp {
        config.mail.smtp_server = mail_smtp;
    }

    if let Some(mail_user) = args.mail_user {
        config.mail.sender_addr = mail_user;
    }

    if let Some(mail_password) = args.mail_password {
        config.mail.sender_pass = mail_password;
    }

    if args.mail_starttls {
        config.mail.starttls = true;
    }

    Ok(config)
}
|
|
@ -1,232 +0,0 @@
|
|||||||
use std::{error::Error, fmt, str::FromStr, sync::atomic::Ordering};
|
|
||||||
|
|
||||||
use log::*;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use serde_json::{json, Map, Value};
|
|
||||||
use sqlx::{Pool, Sqlite};
|
|
||||||
use zeromq::{Socket, SocketRecv, SocketSend, ZmqMessage};
|
|
||||||
|
|
||||||
use crate::db::handles;
|
|
||||||
use crate::player::{
|
|
||||||
controller::{ChannelManager, ProcessUnit::*},
|
|
||||||
utils::{get_delta, get_media_map},
|
|
||||||
};
|
|
||||||
use crate::utils::{config::OutputMode::*, errors::ServiceError, logging::Target, TextFilter};
|
|
||||||
|
|
||||||
/// ZMQ text-control payload: target socket name plus the drawtext parameters.
#[derive(Debug, Deserialize, Serialize, Clone)]
struct TextParams {
    control: String,
    message: TextFilter,
}
|
|
||||||
|
|
||||||
/// Playout control payload; `control` carries the command string
/// ("back", "next", "reset" — see `control_state`).
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ControlParams {
    pub control: String,
}
|
|
||||||
|
|
||||||
/// Media query payload; `media` selects which clip to report
/// (presumably "current"/"next"/"last" — confirm against the route handler).
#[derive(Debug, Deserialize, Serialize, Clone)]
struct MediaParams {
    media: String,
}
|
|
||||||
|
|
||||||
/// Lifecycle commands for a channel's playout process.
/// Serialized in snake_case for the JSON API.
#[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ProcessCtl {
    Status,
    Start,
    Stop,
    Restart,
}
|
|
||||||
|
|
||||||
impl FromStr for ProcessCtl {
|
|
||||||
type Err = String;
|
|
||||||
|
|
||||||
fn from_str(input: &str) -> Result<Self, Self::Err> {
|
|
||||||
match input.to_lowercase().as_str() {
|
|
||||||
"status" => Ok(Self::Status),
|
|
||||||
"start" => Ok(Self::Start),
|
|
||||||
"stop" => Ok(Self::Stop),
|
|
||||||
"restart" => Ok(Self::Restart),
|
|
||||||
_ => Err(format!("Command '{input}' not found!")),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for ProcessCtl {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
match *self {
|
|
||||||
Self::Status => write!(f, "status"),
|
|
||||||
Self::Start => write!(f, "start"),
|
|
||||||
Self::Stop => write!(f, "stop"),
|
|
||||||
Self::Restart => write!(f, "restart"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Process control request body, e.g. `{"command": "restart"}`.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct Process {
    pub command: ProcessCtl,
}
|
|
||||||
|
|
||||||
async fn zmq_send(msg: &str, socket_addr: &str) -> Result<String, Box<dyn Error>> {
|
|
||||||
let mut socket = zeromq::ReqSocket::new();
|
|
||||||
socket.connect(&format!("tcp://{socket_addr}")).await?;
|
|
||||||
socket.send(msg.into()).await?;
|
|
||||||
let repl: ZmqMessage = socket.recv().await?;
|
|
||||||
let response = String::from_utf8(repl.into_vec()[0].to_vec())?;
|
|
||||||
|
|
||||||
Ok(response)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Send a drawtext update to the running ffmpeg process(es) over ZMQ.
///
/// The rendered filter string is pushed either to the ingest server process
/// (HLS mode while ingest is running) or to the stream socket, and is also
/// stored in the channel's filter chain so subsequent clips pick it up.
///
/// Returns the ZMQ reply wrapped in a JSON map, or `ServiceUnavailable`
/// when no ZMQ text socket is configured (or no send succeeded).
pub async fn send_message(
    manager: ChannelManager,
    message: TextFilter,
) -> Result<Map<String, Value>, ServiceError> {
    let filter = message.to_string();
    let mut data_map = Map::new();
    let config = manager.config.lock().unwrap().clone();
    let id = config.general.channel_id;

    if config.text.zmq_stream_socket.is_some() {
        // Remember the filter so newly spawned decoder instances apply it too.
        if let Some(clips_filter) = manager.filter_chain.clone() {
            *clips_filter.lock().unwrap() = vec![filter.clone()];
        }

        if config.output.mode == HLS {
            if manager.ingest_is_running.load(Ordering::SeqCst) {
                // Live ingest active: talk to the ingest (server) process.
                let filter_server = format!("drawtext@dyntext reinit {filter}");

                if let Ok(reply) = zmq_send(
                    &filter_server,
                    &config.text.zmq_server_socket.clone().unwrap(),
                )
                .await
                {
                    data_map.insert("message".to_string(), json!(reply));
                    return Ok(data_map);
                };
            } else if let Err(e) = manager.stop(Ingest) {
                error!(target: Target::file_mail(), channel = id; "Ingest {e:?}")
            }
        }

        // Non-HLS output, or HLS without a running ingest: use the stream socket.
        if config.output.mode != HLS || !manager.ingest_is_running.load(Ordering::SeqCst) {
            let filter_stream = format!("drawtext@dyntext reinit {filter}");

            if let Ok(reply) = zmq_send(
                &filter_stream,
                &config.text.zmq_stream_socket.clone().unwrap(),
            )
            .await
            {
                data_map.insert("message".to_string(), json!(reply));
                return Ok(data_map);
            };
        }
    }

    Err(ServiceError::ServiceUnavailable(
        "text message missing!".to_string(),
    ))
}
|
|
||||||
|
|
||||||
/// Change the playout state from the API.
///
/// Supported commands:
/// - "back":  jump to the previous clip in the current playlist
/// - "next":  jump to the next clip
/// - "reset": drop any time shift and re-initialize the playlist
///
/// Jumping stops the decoder; the player loop then restarts it at the new
/// index. Returns a JSON map describing the performed operation, or an empty
/// map when a jump was not possible (e.g. already at a list boundary).
pub async fn control_state(
    conn: &Pool<Sqlite>,
    manager: &ChannelManager,
    command: &str,
) -> Result<Map<String, Value>, ServiceError> {
    let config = manager.config.lock().unwrap().clone();
    let id = config.general.channel_id;
    let current_date = manager.current_date.lock().unwrap().clone();
    let current_list = manager.current_list.lock().unwrap().clone();
    // NOTE(review): `date` is only ever written below, never read — looks like
    // leftover code; confirm before removing.
    let mut date = manager.current_date.lock().unwrap().clone();
    let index = manager.current_index.load(Ordering::SeqCst);

    match command {
        "back" => {
            if index > 1 && current_list.len() > 1 {
                let mut data_map = Map::new();
                // `index` points at the upcoming clip, so the previous one is index - 2.
                let mut media = current_list[index - 2].clone();
                let (delta, _) = get_delta(&config, &media.begin.unwrap_or(0.0));

                info!(target: Target::file_mail(), channel = id; "Move to last clip");

                manager.current_index.fetch_sub(2, Ordering::SeqCst);

                if let Err(e) = media.add_probe(false) {
                    error!(target: Target::file_mail(), channel = id; "{e:?}");
                };

                // Persist the new time shift for this channel/date.
                manager.channel.lock().unwrap().time_shift = delta;
                date.clone_from(&current_date);
                handles::update_stat(conn, config.general.channel_id, current_date, delta).await?;

                // Stopping the decoder makes the player continue at the new index.
                if manager.stop(Decoder).is_err() {
                    return Err(ServiceError::InternalServerError);
                };

                data_map.insert("operation".to_string(), json!("move_to_last"));
                data_map.insert("shifted_seconds".to_string(), json!(delta));
                data_map.insert("media".to_string(), get_media_map(media));

                return Ok(data_map);
            }
        }

        "next" => {
            if index < current_list.len() {
                let mut data_map = Map::new();
                let mut media = current_list[index].clone();
                let (delta, _) = get_delta(&config, &media.begin.unwrap_or(0.0));

                info!(target: Target::file_mail(), channel = id; "Move to next clip");

                if let Err(e) = media.add_probe(false) {
                    error!(target: Target::file_mail(), channel = id; "{e:?}");
                };

                manager.channel.lock().unwrap().time_shift = delta;
                date.clone_from(&current_date);
                handles::update_stat(conn, config.general.channel_id, current_date, delta).await?;

                if manager.stop(Decoder).is_err() {
                    return Err(ServiceError::InternalServerError);
                };

                data_map.insert("operation".to_string(), json!("move_to_next"));
                data_map.insert("shifted_seconds".to_string(), json!(delta));
                data_map.insert("media".to_string(), get_media_map(media));

                return Ok(data_map);
            }
        }

        "reset" => {
            let mut data_map = Map::new();

            info!(target: Target::file_mail(), channel = id; "Reset playout to original state");

            // Clear the shift and force a playlist re-init on the next loop.
            manager.channel.lock().unwrap().time_shift = 0.0;
            date.clone_from(&current_date);
            manager.list_init.store(true, Ordering::SeqCst);

            handles::update_stat(conn, config.general.channel_id, current_date, 0.0).await?;

            if manager.stop(Decoder).is_err() {
                return Err(ServiceError::InternalServerError);
            };

            data_map.insert("operation".to_string(), json!("reset_playout_state"));

            return Ok(data_map);
        }

        _ => {
            return Err(ServiceError::ServiceUnavailable(
                "Command not found!".to_string(),
            ))
        }
    }

    Ok(Map::new())
}
|
|
@ -1,490 +0,0 @@
|
|||||||
use std::{
|
|
||||||
collections::{hash_map, HashMap},
|
|
||||||
env,
|
|
||||||
io::{self, ErrorKind, Write},
|
|
||||||
path::PathBuf,
|
|
||||||
sync::{Arc, Mutex},
|
|
||||||
time::Duration,
|
|
||||||
};
|
|
||||||
|
|
||||||
use actix_web::rt::time::interval;
|
|
||||||
use flexi_logger::{
|
|
||||||
writers::{FileLogWriter, LogWriter},
|
|
||||||
Age, Cleanup, Criterion, DeferredNow, FileSpec, Level, LogSpecification, Logger, Naming,
|
|
||||||
};
|
|
||||||
use lettre::{
|
|
||||||
message::header, transport::smtp::authentication::Credentials, AsyncSmtpTransport,
|
|
||||||
AsyncTransport, Message, Tokio1Executor,
|
|
||||||
};
|
|
||||||
use log::{kv::Value, *};
|
|
||||||
use paris::formatter::colorize_string;
|
|
||||||
use regex::Regex;
|
|
||||||
|
|
||||||
use super::ARGS;
|
|
||||||
|
|
||||||
use crate::db::models::GlobalSettings;
|
|
||||||
use crate::utils::{config::Mail, errors::ProcessError, round_to_nearest_ten};
|
|
||||||
|
|
||||||
/// Log-target selector strings for flexi_logger writer routing.
///
/// The returned strings are used as `log` macro `target:` values; names in
/// braces refer to writers registered in `init_logging` ("file", "mail") and
/// flexi_logger's built-in `_Default`.
#[derive(Debug)]
pub struct Target;

impl Target {
    /// All writers; in `--log-to-console` mode only the default (console).
    pub fn all() -> &'static str {
        if ARGS.log_to_console {
            "{_Default}"
        } else {
            "{file,mail,_Default}"
        }
    }

    /// Console writer only.
    pub fn console() -> &'static str {
        "{console}"
    }

    /// File writer only.
    pub fn file() -> &'static str {
        "{file}"
    }

    /// Mail writer only.
    pub fn mail() -> &'static str {
        "{mail}"
    }

    /// File and mail writers, skipping the console.
    pub fn file_mail() -> &'static str {
        "{file,mail}"
    }
}
|
|
||||||
|
|
||||||
pub struct LogConsole;
|
|
||||||
|
|
||||||
impl LogWriter for LogConsole {
|
|
||||||
fn write(&self, now: &mut DeferredNow, record: &Record<'_>) -> std::io::Result<()> {
|
|
||||||
console_formatter(&mut std::io::stderr(), now, record)?;
|
|
||||||
|
|
||||||
println!();
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
fn flush(&self) -> std::io::Result<()> {
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Dispatches file logging to one rotating log file per channel id.
struct MultiFileLogger {
    // Directory that holds the per-channel log files.
    log_path: PathBuf,
    // Lazily created writers, keyed by channel id.
    writers: Arc<Mutex<HashMap<i32, Arc<Mutex<FileLogWriter>>>>>,
}

impl MultiFileLogger {
    pub fn new(log_path: PathBuf) -> Self {
        MultiFileLogger {
            log_path,
            writers: Arc::new(Mutex::new(HashMap::new())),
        }
    }

    /// Get (or lazily create) the writer for `channel`.
    ///
    /// Files are named `ffplayout_<channel>`, rotated daily with a
    /// `%Y-%m-%d` suffix, and cleaned up after `--log-backup-count`
    /// rotations (default 14).
    fn get_writer(&self, channel: i32) -> io::Result<Arc<Mutex<FileLogWriter>>> {
        let mut writers = self.writers.lock().unwrap();
        if let hash_map::Entry::Vacant(e) = writers.entry(channel) {
            let writer = FileLogWriter::builder(
                FileSpec::default()
                    .suppress_timestamp()
                    .directory(&self.log_path)
                    .basename("ffplayout")
                    .discriminant(channel.to_string()),
            )
            .format(file_formatter)
            .append()
            .rotate(
                Criterion::Age(Age::Day),
                Naming::TimestampsCustomFormat {
                    current_infix: Some(""),
                    format: "%Y-%m-%d",
                },
                Cleanup::KeepLogFiles(ARGS.log_backup_count.unwrap_or(14)),
            )
            .try_build()
            .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
            e.insert(Arc::new(Mutex::new(writer)));
        }

        Ok(writers.get(&channel).unwrap().clone())
    }
}
|
|
||||||
|
|
||||||
impl LogWriter for MultiFileLogger {
|
|
||||||
fn write(&self, now: &mut DeferredNow, record: &Record) -> io::Result<()> {
|
|
||||||
let channel = i32::try_from(
|
|
||||||
record
|
|
||||||
.key_values()
|
|
||||||
.get("channel".into())
|
|
||||||
.unwrap_or(Value::null())
|
|
||||||
.to_i64()
|
|
||||||
.unwrap_or(0),
|
|
||||||
)
|
|
||||||
.unwrap_or(0);
|
|
||||||
let writer = self.get_writer(channel);
|
|
||||||
let w = writer?.lock().unwrap().write(now, record);
|
|
||||||
|
|
||||||
w
|
|
||||||
}
|
|
||||||
|
|
||||||
fn flush(&self) -> io::Result<()> {
|
|
||||||
let writers = self.writers.lock().unwrap();
|
|
||||||
for writer in writers.values() {
|
|
||||||
writer.lock().unwrap().flush()?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Log writer that feeds records into the per-channel mail queues.
pub struct LogMailer {
    pub mail_queues: Arc<Mutex<Vec<Arc<Mutex<MailQueue>>>>>,
    // Recently queued messages, used to suppress duplicates.
    raw_lines: Arc<Mutex<Vec<String>>>,
}

impl LogMailer {
    pub fn new(mail_queues: Arc<Mutex<Vec<Arc<Mutex<MailQueue>>>>>) -> Self {
        Self {
            mail_queues,
            raw_lines: Arc::new(Mutex::new(vec![])),
        }
    }
}
|
|
||||||
|
|
||||||
impl LogWriter for LogMailer {
    /// Queue a log record for mailing on the matching channel.
    ///
    /// The record's "channel" key/value selects the queue (0 when absent or
    /// out of i32 range); duplicates are suppressed via `raw_lines`.
    fn write(&self, now: &mut DeferredNow, record: &Record<'_>) -> std::io::Result<()> {
        // Channel id from the record's key/values, falling back to 0.
        let id = i32::try_from(
            record
                .key_values()
                .get("channel".into())
                .unwrap_or(Value::null())
                .to_i64()
                .unwrap_or(0),
        )
        .unwrap_or(0);

        // Recover from a poisoned mutex instead of panicking inside the logger.
        let mut queues = self.mail_queues.lock().unwrap_or_else(|poisoned| {
            error!("Queues mutex was poisoned");
            poisoned.into_inner()
        });

        for queue in queues.iter_mut() {
            let mut q_lock = queue.lock().unwrap_or_else(|poisoned| {
                error!("Queue mutex was poisoned");
                poisoned.into_inner()
            });

            let msg = strip_tags(&record.args().to_string());
            let mut raw_lines = self.raw_lines.lock().unwrap();

            // Only queue for the right channel, at a level the queue cares
            // about, and when the same message was not already queued.
            if q_lock.id == id && q_lock.level_eq(record.level()) && !raw_lines.contains(&msg) {
                q_lock.push(format!(
                    "[{}] [{:>5}] {}",
                    now.now().format("%Y-%m-%d %H:%M:%S"),
                    record.level(),
                    msg.clone()
                ));
                raw_lines.push(msg);

                break;
            }

            // Cap the dedupe buffer: once it grows past 1000 entries, keep
            // only the newest one.
            if raw_lines.len() > 1000 {
                let last = raw_lines.pop().unwrap();
                raw_lines.clear();
                raw_lines.push(last);
            }
        }

        Ok(())
    }
    fn flush(&self) -> std::io::Result<()> {
        Ok(())
    }
}
|
|
||||||
|
|
||||||
/// Per-channel buffer of log lines waiting to be sent by mail.
#[derive(Clone, Debug)]
pub struct MailQueue {
    // Channel id this queue belongs to.
    pub id: i32,
    // Mail settings (SMTP server, recipient, level, interval).
    pub config: Mail,
    // Formatted log lines queued since the last send.
    pub lines: Vec<String>,
}
|
|
||||||
|
|
||||||
impl MailQueue {
    pub fn new(id: i32, config: Mail) -> Self {
        Self {
            id,
            config,
            lines: vec![],
        }
    }

    /// True when `level` is at least as severe as the configured mail level.
    pub fn level_eq(&self, level: Level) -> bool {
        level <= self.config.mail_level
    }

    /// Replace the mail configuration (e.g. after a channel update).
    pub fn update(&mut self, config: Mail) {
        self.config = config;
    }

    /// Drop all queued lines.
    pub fn clear(&mut self) {
        self.lines.clear();
    }

    /// Append a formatted log line.
    pub fn push(&mut self, line: String) {
        self.lines.push(line);
    }

    /// Mail body: all queued lines joined by newlines.
    fn text(&self) -> String {
        self.lines.join("\n")
    }

    fn is_empty(&self) -> bool {
        self.lines.is_empty()
    }
}
|
|
||||||
|
|
||||||
fn strip_tags(input: &str) -> String {
|
|
||||||
let re = Regex::new(r"<[^>]*>").unwrap();
|
|
||||||
re.replace_all(input, "").to_string()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Format a record for console output with colored level tags; prepends a
/// timestamp when `--log-timestamp` is set. Writes no trailing newline.
fn console_formatter(w: &mut dyn Write, now: &mut DeferredNow, record: &Record) -> io::Result<()> {
    let log_line = match record.level() {
        Level::Debug => colorize_string(format!("<bright-blue>[DEBUG]</> {}", record.args())),
        Level::Error => colorize_string(format!("<bright-red>[ERROR]</> {}", record.args())),
        Level::Info => colorize_string(format!("<bright-green>[ INFO]</> {}", record.args())),
        // Trace additionally shows the source location.
        Level::Trace => colorize_string(format!(
            "<bright-yellow>[TRACE]</> {}:{} {}",
            record.file().unwrap_or_default(),
            record.line().unwrap_or_default(),
            record.args()
        )),
        Level::Warn => colorize_string(format!("<yellow>[ WARN]</> {}", record.args())),
    };

    if ARGS.log_timestamp {
        write!(
            w,
            "{} {}",
            colorize_string(format!(
                "<bright black>[{}]</>",
                now.now().format("%Y-%m-%d %H:%M:%S%.6f")
            )),
            log_line
        )
    } else {
        write!(w, "{}", log_line)
    }
}
|
|
||||||
|
|
||||||
/// Format a record for the log files: `[timestamp] [LEVEL] message`,
/// without color tags and without a trailing newline.
fn file_formatter(
    w: &mut dyn Write,
    now: &mut DeferredNow,
    record: &Record,
) -> std::io::Result<()> {
    write!(
        w,
        "[{}] [{:>5}] {}",
        now.now().format("%Y-%m-%d %H:%M:%S%.6f"),
        record.level(),
        record.args()
    )
}
|
|
||||||
|
|
||||||
/// Resolve the directory used for log files.
///
/// Precedence: `--logs` CLI argument, then the global settings value.
/// Relative paths are resolved against the current working directory; when
/// the result is not an existing directory, the cwd itself is used.
pub fn log_file_path() -> PathBuf {
    let config = GlobalSettings::global();
    let mut log_path = PathBuf::from(&ARGS.logs.as_ref().unwrap_or(&config.logs));

    if !log_path.is_absolute() {
        log_path = env::current_dir().unwrap().join(log_path);
    }

    if !log_path.is_dir() {
        log_path = env::current_dir().unwrap();
    }

    log_path
}
|
|
||||||
|
|
||||||
/// Writer for the "file" log target: plain console output in
/// `--log-to-console` mode, otherwise one rotating file per channel.
fn file_logger() -> Box<dyn LogWriter> {
    if ARGS.log_to_console {
        Box::new(LogConsole)
    } else {
        Box::new(MultiFileLogger::new(log_file_path()))
    }
}
|
|
||||||
|
|
||||||
/// send log messages to mail recipient
///
/// Splits `config.recipient` on ',', ';' or spaces, keeps entries that look
/// like addresses (contain '@'), and sends `msg` as plain text via SMTP —
/// STARTTLS when configured, otherwise an implicit-TLS relay.
pub async fn send_mail(config: &Mail, msg: String) -> Result<(), ProcessError> {
    let recipient = config
        .recipient
        .split_terminator([',', ';', ' '])
        .filter(|s| s.contains('@'))
        .map(|s| s.trim())
        .collect::<Vec<&str>>();

    let mut message = Message::builder()
        .from(config.sender_addr.parse()?)
        .subject(&config.subject)
        .header(header::ContentType::TEXT_PLAIN);

    for r in recipient {
        message = message.to(r.parse()?);
    }

    let mail = message.body(msg)?;
    // NOTE(review): the sender address doubles as the SMTP username — confirm
    // this assumption holds for all supported providers.
    let credentials = Credentials::new(config.sender_addr.clone(), config.sender_pass.clone());

    let mut transporter =
        AsyncSmtpTransport::<Tokio1Executor>::relay(config.smtp_server.clone().as_str());

    if config.starttls {
        transporter = AsyncSmtpTransport::<Tokio1Executor>::starttls_relay(
            config.smtp_server.clone().as_str(),
        );
    }

    let mailer = transporter?.credentials(credentials).build();

    // Send the mail
    mailer.send(mail).await?;

    Ok(())
}
|
|
||||||
|
|
||||||
/// Basic Mail Queue
|
|
||||||
///
|
|
||||||
/// Check every give seconds for messages and send them.
|
|
||||||
pub fn mail_queue(mail_queues: Arc<Mutex<Vec<Arc<Mutex<MailQueue>>>>>) {
|
|
||||||
actix_web::rt::spawn(async move {
|
|
||||||
let sec = 10;
|
|
||||||
let mut interval = interval(Duration::from_secs(sec));
|
|
||||||
let mut counter = 0;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
interval.tick().await;
|
|
||||||
let mut tasks = vec![];
|
|
||||||
|
|
||||||
// Reset the counter after one day
|
|
||||||
if counter >= 86400 {
|
|
||||||
counter = 0;
|
|
||||||
} else {
|
|
||||||
counter += sec;
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let mut queues = match mail_queues.lock() {
|
|
||||||
Ok(l) => l,
|
|
||||||
Err(e) => {
|
|
||||||
error!("Failed to lock mail_queues {e}");
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Process mail queues and send emails
|
|
||||||
for queue in queues.iter_mut() {
|
|
||||||
let interval = round_to_nearest_ten(counter as i64);
|
|
||||||
let mut q_lock = queue.lock().unwrap_or_else(|poisoned| {
|
|
||||||
error!("Queue mutex was poisoned");
|
|
||||||
|
|
||||||
poisoned.into_inner()
|
|
||||||
});
|
|
||||||
|
|
||||||
let expire = round_to_nearest_ten(q_lock.config.interval.max(30));
|
|
||||||
|
|
||||||
if interval % expire == 0 && !q_lock.is_empty() {
|
|
||||||
if q_lock.config.recipient.contains('@') {
|
|
||||||
tasks.push((q_lock.config.clone(), q_lock.text().clone(), q_lock.id));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Clear the messages after sending the email
|
|
||||||
q_lock.clear();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for (config, text, id) in tasks {
|
|
||||||
if let Err(e) = send_mail(&config, text).await {
|
|
||||||
error!(target: "{file}", channel = id; "Failed to send mail: {e}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Initialize our logging, to have:
|
|
||||||
///
|
|
||||||
/// - console logger
|
|
||||||
/// - file logger
|
|
||||||
/// - mail logger
|
|
||||||
pub fn init_logging(mail_queues: Arc<Mutex<Vec<Arc<Mutex<MailQueue>>>>>) -> io::Result<()> {
|
|
||||||
let log_level = match ARGS
|
|
||||||
.log_level
|
|
||||||
.clone()
|
|
||||||
.unwrap_or("debug".to_string())
|
|
||||||
.to_lowercase()
|
|
||||||
.as_str()
|
|
||||||
{
|
|
||||||
"debug" => LevelFilter::Debug,
|
|
||||||
"error" => LevelFilter::Error,
|
|
||||||
"info" => LevelFilter::Info,
|
|
||||||
"trace" => LevelFilter::Trace,
|
|
||||||
"warn" => LevelFilter::Warn,
|
|
||||||
"off" => LevelFilter::Off,
|
|
||||||
_ => LevelFilter::Debug,
|
|
||||||
};
|
|
||||||
|
|
||||||
mail_queue(mail_queues.clone());
|
|
||||||
|
|
||||||
// Build the initial log specification
|
|
||||||
let mut builder = LogSpecification::builder();
|
|
||||||
builder
|
|
||||||
.default(log_level)
|
|
||||||
.module("actix", LevelFilter::Info)
|
|
||||||
.module("actix_files", LevelFilter::Info)
|
|
||||||
.module("actix_web", LevelFilter::Info)
|
|
||||||
.module("actix_web_service", LevelFilter::Error)
|
|
||||||
.module("hyper", LevelFilter::Error)
|
|
||||||
.module("flexi_logger", LevelFilter::Error)
|
|
||||||
.module("libc", LevelFilter::Error)
|
|
||||||
.module("log", LevelFilter::Error)
|
|
||||||
.module("mio", LevelFilter::Error)
|
|
||||||
.module("neli", LevelFilter::Error)
|
|
||||||
.module("reqwest", LevelFilter::Error)
|
|
||||||
.module("rpc", LevelFilter::Error)
|
|
||||||
.module("rustls", LevelFilter::Error)
|
|
||||||
.module("serial_test", LevelFilter::Error)
|
|
||||||
.module("sqlx", LevelFilter::Error)
|
|
||||||
.module("tokio", LevelFilter::Error);
|
|
||||||
|
|
||||||
Logger::with(builder.build())
|
|
||||||
.format(console_formatter)
|
|
||||||
.log_to_stderr()
|
|
||||||
.add_writer("file", file_logger())
|
|
||||||
.add_writer("mail", Box::new(LogMailer::new(mail_queues)))
|
|
||||||
.start()
|
|
||||||
.map_err(|e| io::Error::new(ErrorKind::Other, e.to_string()))?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Format ingest and HLS logging output
|
|
||||||
pub fn log_line(line: &str, level: &str) {
|
|
||||||
if line.contains("[info]") && level.to_lowercase() == "info" {
|
|
||||||
info!("<bright black>[Server]</> {}", line.replace("[info] ", ""))
|
|
||||||
} else if line.contains("[warning]")
|
|
||||||
&& (level.to_lowercase() == "warning" || level.to_lowercase() == "info")
|
|
||||||
{
|
|
||||||
warn!(
|
|
||||||
"<bright black>[Server]</> {}",
|
|
||||||
line.replace("[warning] ", "")
|
|
||||||
)
|
|
||||||
} else if line.contains("[error]")
|
|
||||||
&& !line.contains("Input/output error")
|
|
||||||
&& !line.contains("Broken pipe")
|
|
||||||
{
|
|
||||||
error!("<bright black>[Server]</> {}", line.replace("[error] ", ""));
|
|
||||||
} else if line.contains("[fatal]") {
|
|
||||||
error!("<bright black>[Server]</> {}", line.replace("[fatal] ", ""))
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,391 +0,0 @@
|
|||||||
use std::{
|
|
||||||
env, fmt,
|
|
||||||
net::TcpListener,
|
|
||||||
path::{Path, PathBuf},
|
|
||||||
};
|
|
||||||
|
|
||||||
#[cfg(target_family = "unix")]
|
|
||||||
use std::os::unix::fs::MetadataExt;
|
|
||||||
|
|
||||||
use chrono::{format::ParseErrorKind, prelude::*};
|
|
||||||
use faccess::PathExt;
|
|
||||||
use log::*;
|
|
||||||
use path_clean::PathClean;
|
|
||||||
use rand::Rng;
|
|
||||||
use regex::Regex;
|
|
||||||
use tokio::{fs, process::Command};
|
|
||||||
|
|
||||||
use serde::{
|
|
||||||
de::{self, Visitor},
|
|
||||||
Deserialize, Deserializer, Serialize,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub mod advanced_config;
|
|
||||||
pub mod args_parse;
|
|
||||||
pub mod channels;
|
|
||||||
pub mod config;
|
|
||||||
pub mod control;
|
|
||||||
pub mod errors;
|
|
||||||
pub mod files;
|
|
||||||
pub mod generator;
|
|
||||||
pub mod logging;
|
|
||||||
pub mod playlist;
|
|
||||||
pub mod system;
|
|
||||||
pub mod task_runner;
|
|
||||||
|
|
||||||
use crate::db::models::GlobalSettings;
|
|
||||||
use crate::player::utils::time_to_sec;
|
|
||||||
use crate::utils::{errors::ServiceError, logging::log_file_path};
|
|
||||||
use crate::ARGS;
|
|
||||||
|
|
||||||
/// Parameters for ffmpeg's drawtext filter, as received from the API.
///
/// Numeric-looking fields are stored as strings because drawtext accepts
/// expressions; incoming JSON numbers are normalized to strings by
/// `deserialize_number_or_string`.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct TextFilter {
    pub text: Option<String>,
    #[serde(default, deserialize_with = "deserialize_number_or_string")]
    pub x: Option<String>,
    #[serde(default, deserialize_with = "deserialize_number_or_string")]
    pub y: Option<String>,
    #[serde(default, deserialize_with = "deserialize_number_or_string")]
    pub fontsize: Option<String>,
    #[serde(default, deserialize_with = "deserialize_number_or_string")]
    pub line_spacing: Option<String>,
    pub fontcolor: Option<String>,
    #[serde(default, deserialize_with = "deserialize_number_or_string")]
    pub alpha: Option<String>,
    #[serde(default, deserialize_with = "deserialize_number_or_string")]
    pub r#box: Option<String>,
    pub boxcolor: Option<String>,
    #[serde(default, deserialize_with = "deserialize_number_or_string")]
    pub boxborderw: Option<String>,
}
|
|
||||||
|
|
||||||
/// Deserialize number or string
|
|
||||||
pub fn deserialize_number_or_string<'de, D>(deserializer: D) -> Result<Option<String>, D::Error>
|
|
||||||
where
|
|
||||||
D: serde::Deserializer<'de>,
|
|
||||||
{
|
|
||||||
struct StringOrNumberVisitor;
|
|
||||||
|
|
||||||
impl<'de> Visitor<'de> for StringOrNumberVisitor {
|
|
||||||
type Value = Option<String>;
|
|
||||||
|
|
||||||
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
|
|
||||||
formatter.write_str("a string or a number")
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_str<E: de::Error>(self, value: &str) -> Result<Self::Value, E> {
|
|
||||||
let re = Regex::new(r"0,([0-9]+)").unwrap();
|
|
||||||
let clean_string = re.replace_all(value, "0.$1").to_string();
|
|
||||||
Ok(Some(clean_string))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_u64<E: de::Error>(self, value: u64) -> Result<Self::Value, E> {
|
|
||||||
Ok(Some(value.to_string()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_i64<E: de::Error>(self, value: i64) -> Result<Self::Value, E> {
|
|
||||||
Ok(Some(value.to_string()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_f64<E: de::Error>(self, value: f64) -> Result<Self::Value, E> {
|
|
||||||
Ok(Some(value.to_string()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
deserializer.deserialize_any(StringOrNumberVisitor)
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for TextFilter {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
let escaped_text = self
|
|
||||||
.text
|
|
||||||
.clone()
|
|
||||||
.unwrap_or_default()
|
|
||||||
.replace('\'', "'\\\\\\''")
|
|
||||||
.replace('\\', "\\\\\\\\")
|
|
||||||
.replace('%', "\\\\\\%")
|
|
||||||
.replace(':', "\\:");
|
|
||||||
|
|
||||||
let mut s = format!("text='{escaped_text}'");
|
|
||||||
|
|
||||||
if let Some(v) = &self.x {
|
|
||||||
if !v.is_empty() {
|
|
||||||
s.push_str(&format!(":x='{v}'"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(v) = &self.y {
|
|
||||||
if !v.is_empty() {
|
|
||||||
s.push_str(&format!(":y='{v}'"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(v) = &self.fontsize {
|
|
||||||
if !v.is_empty() {
|
|
||||||
s.push_str(&format!(":fontsize={v}"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(v) = &self.line_spacing {
|
|
||||||
if !v.is_empty() {
|
|
||||||
s.push_str(&format!(":line_spacing={v}"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(v) = &self.fontcolor {
|
|
||||||
if !v.is_empty() {
|
|
||||||
s.push_str(&format!(":fontcolor={v}"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(v) = &self.alpha {
|
|
||||||
if !v.is_empty() {
|
|
||||||
s.push_str(&format!(":alpha='{v}'"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(v) = &self.r#box {
|
|
||||||
if !v.is_empty() {
|
|
||||||
s.push_str(&format!(":box={v}"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(v) = &self.boxcolor {
|
|
||||||
if !v.is_empty() {
|
|
||||||
s.push_str(&format!(":boxcolor={v}"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(v) = &self.boxborderw {
|
|
||||||
if !v.is_empty() {
|
|
||||||
s.push_str(&format!(":boxborderw={v}"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
write!(f, "{s}")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Resolve the SQLite database path.
///
/// Resolution order: the `--db` CLI argument (a file, or a directory that
/// will receive `ffplayout.db`), then `/usr/share/ffplayout/db` if writable,
/// then `./assets`, finally `./ffplayout.db`.
pub fn db_path() -> Result<&'static str, Box<dyn std::error::Error>> {
    if let Some(path) = ARGS.db.clone() {
        let mut absolute_path = if path.is_absolute() {
            path
        } else {
            env::current_dir()?.join(path)
        }
        .clean();

        // A directory argument means "put ffplayout.db inside it".
        if absolute_path.is_dir() {
            absolute_path = absolute_path.join("ffplayout.db");
        }

        if let Some(abs_path) = absolute_path.parent() {
            if abs_path.writable() {
                // Leaked once at startup so callers can keep a &'static str.
                return Ok(Box::leak(
                    absolute_path.to_string_lossy().to_string().into_boxed_str(),
                ));
            }

            error!("Given database path is not writable!");
        }
    }

    let sys_path = Path::new("/usr/share/ffplayout/db");
    let mut db_path = "./ffplayout.db";

    if sys_path.is_dir() && !sys_path.writable() {
        error!("Path {} is not writable!", sys_path.display());
    }

    if sys_path.is_dir() && sys_path.writable() {
        db_path = "/usr/share/ffplayout/db/ffplayout.db";
    } else if Path::new("./assets").is_dir() {
        db_path = "./assets/ffplayout.db";
    }

    Ok(db_path)
}
|
|
||||||
|
|
||||||
pub fn public_path() -> PathBuf {
|
|
||||||
let config = GlobalSettings::global();
|
|
||||||
let dev_path = env::current_dir()
|
|
||||||
.unwrap_or_default()
|
|
||||||
.join("frontend/.output/public/");
|
|
||||||
let mut public_path = PathBuf::from(&config.public);
|
|
||||||
|
|
||||||
if let Some(p) = &ARGS.public {
|
|
||||||
// When public path is set as argument use this path for serving static files.
|
|
||||||
// Works only when feature embed_frontend is not set.
|
|
||||||
let public = PathBuf::from(p);
|
|
||||||
|
|
||||||
public_path = if public.is_absolute() {
|
|
||||||
public.to_path_buf()
|
|
||||||
} else {
|
|
||||||
env::current_dir().unwrap_or_default().join(public)
|
|
||||||
}
|
|
||||||
.clean();
|
|
||||||
} else if cfg!(debug_assertions) && dev_path.is_dir() {
|
|
||||||
public_path = dev_path;
|
|
||||||
}
|
|
||||||
|
|
||||||
public_path
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn read_log_file(channel_id: &i32, date: &str) -> Result<String, ServiceError> {
|
|
||||||
let date_str = if date.is_empty() {
|
|
||||||
"".to_string()
|
|
||||||
} else {
|
|
||||||
format!("_{date}")
|
|
||||||
};
|
|
||||||
|
|
||||||
let log_path = log_file_path()
|
|
||||||
.join(format!("ffplayout_{channel_id}{date_str}.log"))
|
|
||||||
.clean();
|
|
||||||
let file_size = fs::metadata(&log_path).await?.len() as f64;
|
|
||||||
|
|
||||||
let log_content = if file_size > 5000000.0 {
|
|
||||||
error!("Log file to big: {}", sizeof_fmt(file_size));
|
|
||||||
format!("The log file is larger ({}) than the hard limit of 5MB, the probability is very high that something is wrong with the playout.\nCheck this on the server with `less {log_path:?}`.", sizeof_fmt(file_size))
|
|
||||||
} else {
|
|
||||||
fs::read_to_string(log_path).await?
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(log_content)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Format a byte count as a human readable string using binary (IEC)
/// prefixes, e.g. `1536.0` -> `"1.5KiB"`. Values beyond `Zi` fall through
/// to `Yi`.
pub fn sizeof_fmt(num: f64) -> String {
    const UNITS: [&str; 8] = ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"];
    let mut value = num;

    for unit in UNITS {
        if value.abs() < 1024.0 {
            return format!("{value:.1}{unit}B");
        }

        value /= 1024.0;
    }

    format!("{value:.1}YiB")
}
|
|
||||||
|
|
||||||
pub fn local_utc_offset() -> i32 {
|
|
||||||
let mut offset = Local::now().format("%:z").to_string();
|
|
||||||
let operator = offset.remove(0);
|
|
||||||
let mut utc_offset = 0;
|
|
||||||
|
|
||||||
if let Some((r, f)) = offset.split_once(':') {
|
|
||||||
utc_offset = r.parse::<i32>().unwrap_or(0) * 60 + f.parse::<i32>().unwrap_or(0);
|
|
||||||
|
|
||||||
if operator == '-' && utc_offset > 0 {
|
|
||||||
utc_offset = -utc_offset;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
utc_offset
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn naive_date_time_from_str<'de, D>(deserializer: D) -> Result<NaiveDateTime, D::Error>
|
|
||||||
where
|
|
||||||
D: Deserializer<'de>,
|
|
||||||
{
|
|
||||||
let s: String = Deserialize::deserialize(deserializer)?;
|
|
||||||
|
|
||||||
match NaiveDateTime::parse_from_str(&s, "%Y-%m-%dT%H:%M:%S") {
|
|
||||||
Ok(date_time) => Ok(date_time),
|
|
||||||
Err(e) => {
|
|
||||||
if e.kind() == ParseErrorKind::TooShort {
|
|
||||||
NaiveDateTime::parse_from_str(&format!("{s}T00:00:00"), "%Y-%m-%dT%H:%M:%S")
|
|
||||||
.map_err(de::Error::custom)
|
|
||||||
} else {
|
|
||||||
NaiveDateTime::parse_from_str(&s, "%Y-%m-%dT%H:%M:%S%#z").map_err(de::Error::custom)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// get a free tcp socket
|
|
||||||
pub fn gen_tcp_socket(exclude_socket: String) -> Option<String> {
|
|
||||||
for _ in 0..100 {
|
|
||||||
let port = rand::thread_rng().gen_range(45321..54268);
|
|
||||||
let socket = format!("127.0.0.1:{port}");
|
|
||||||
|
|
||||||
if socket != exclude_socket && TcpListener::bind(("127.0.0.1", port)).is_ok() {
|
|
||||||
return Some(socket);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Round to the nearest multiple of ten.
///
/// Uses truncating integer division, so for negative inputs the remainder
/// is negative and the value rounds toward zero (e.g. `-15` -> `-10`),
/// matching the original behavior.
pub fn round_to_nearest_ten(num: i64) -> i64 {
    let quotient = num / 10;

    match num % 10 {
        r if r >= 5 => (quotient + 1) * 10,
        _ => quotient * 10,
    }
}
|
|
||||||
|
|
||||||
/// Copy the default assets (dummy subtitle, font, logo) into
/// `{storage_path}/00-assets`, creating the directory when missing.
///
/// Sources are taken from `/usr/share/ffplayout/` when installed, falling
/// back to the local `./assets/` directory. Existing `00-assets` directories
/// are left untouched. On unix, when running as root and the storage path is
/// owned by another user, ownership of the copied files is transferred to
/// that user.
pub async fn copy_assets(storage_path: &Path) -> Result<(), std::io::Error> {
    if storage_path.is_dir() {
        let target = storage_path.join("00-assets");
        let mut dummy_source = Path::new("/usr/share/ffplayout/dummy.vtt");
        let mut font_source = Path::new("/usr/share/ffplayout/DejaVuSans.ttf");
        let mut logo_source = Path::new("/usr/share/ffplayout/logo.png");

        // Fall back to the repository-local assets when no system install exists.
        if !dummy_source.is_file() {
            dummy_source = Path::new("./assets/dummy.vtt")
        }
        if !font_source.is_file() {
            font_source = Path::new("./assets/DejaVuSans.ttf")
        }
        if !logo_source.is_file() {
            logo_source = Path::new("./assets/logo.png")
        }

        // Only populate the target once; never overwrite user-modified assets.
        if !target.is_dir() {
            let dummy_target = target.join("dummy.vtt");
            let font_target = target.join("DejaVuSans.ttf");
            let logo_target = target.join("logo.png");

            fs::create_dir_all(&target).await?;
            fs::copy(&dummy_source, &dummy_target).await?;
            fs::copy(&font_source, &font_target).await?;
            fs::copy(&logo_source, &logo_target).await?;

            #[cfg(target_family = "unix")]
            {
                let uid = nix::unistd::Uid::current();
                // NOTE(review): unwrap panics if storage_path metadata cannot be
                // read; the is_dir() check above makes this unlikely but not
                // impossible (e.g. permissions) — consider propagating instead.
                let parent_owner = storage_path.metadata().unwrap().uid();

                // Only chown when running as root on behalf of another user.
                if uid.is_root() && uid.to_string() != parent_owner.to_string() {
                    // NOTE(review): the inner unwrap panics when the owning uid
                    // has no passwd entry — TODO confirm this cannot happen here.
                    let user = nix::unistd::User::from_uid(parent_owner.into())
                        .unwrap_or_default()
                        .unwrap();

                    nix::unistd::chown(&target, Some(user.uid), Some(user.gid))?;

                    if dummy_target.is_file() {
                        nix::unistd::chown(&dummy_target, Some(user.uid), Some(user.gid))?;
                    }
                    if font_target.is_file() {
                        nix::unistd::chown(&font_target, Some(user.uid), Some(user.gid))?;
                    }
                    if logo_target.is_file() {
                        nix::unistd::chown(&logo_target, Some(user.uid), Some(user.gid))?;
                    }
                }
            }
        }
    } else {
        error!("Storage path {storage_path:?} not exists!");
    }

    Ok(())
}
|
|
||||||
|
|
||||||
/// Combined function to check if the program is running inside a container.
|
|
||||||
/// Returns `true` if running inside a container, otherwise `false`.
|
|
||||||
pub async fn is_running_in_container() -> bool {
|
|
||||||
// Check for Docker or Podman specific files
|
|
||||||
if Path::new("/.dockerenv").exists() || Path::new("/run/.containerenv").exists() {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Run `systemd-detect-virt -c` to check if we are in a container
|
|
||||||
if let Ok(output) = Command::new("systemd-detect-virt").arg("-c").output().await {
|
|
||||||
return output.status.success();
|
|
||||||
}
|
|
||||||
|
|
||||||
false
|
|
||||||
}
|
|
@ -1,27 +0,0 @@
|
|||||||
use std::process::Command;
|
|
||||||
|
|
||||||
use log::*;
|
|
||||||
|
|
||||||
use crate::player::utils::get_data_map;
|
|
||||||
|
|
||||||
use crate::player::controller::ChannelManager;
|
|
||||||
|
|
||||||
pub fn run(manager: ChannelManager) {
|
|
||||||
let task_path = manager.config.lock().unwrap().task.path.clone();
|
|
||||||
|
|
||||||
let obj = serde_json::to_string(&get_data_map(&manager)).unwrap();
|
|
||||||
trace!("Run task: {obj}");
|
|
||||||
|
|
||||||
match Command::new(task_path).arg(obj).spawn() {
|
|
||||||
Ok(mut c) => {
|
|
||||||
let status = c.wait().expect("Error in waiting for the task process!");
|
|
||||||
|
|
||||||
if !status.success() {
|
|
||||||
error!("Process stops with error.");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
error!("Couldn't spawn task runner: {e}")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
60
ffplayout-api/Cargo.toml
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
[package]
|
||||||
|
name = "ffplayout-api"
|
||||||
|
description = "Rest API for ffplayout"
|
||||||
|
readme = "README.md"
|
||||||
|
version.workspace = true
|
||||||
|
license.workspace = true
|
||||||
|
authors.workspace = true
|
||||||
|
repository.workspace = true
|
||||||
|
edition.workspace = true
|
||||||
|
|
||||||
|
[features]
|
||||||
|
default = ["embed_frontend"]
|
||||||
|
embed_frontend = []
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
ffplayout-lib = { path = "../lib" }
|
||||||
|
actix-files = "0.6"
|
||||||
|
actix-multipart = "0.6"
|
||||||
|
actix-web = "4"
|
||||||
|
actix-web-grants = "4"
|
||||||
|
actix-web-httpauth = "0.8"
|
||||||
|
actix-web-lab = "0.20"
|
||||||
|
actix-web-static-files = "4.0"
|
||||||
|
argon2 = "0.5"
|
||||||
|
chrono = { version = "0.4", default-features = false, features = ["clock", "std"] }
|
||||||
|
clap = { version = "4.3", features = ["derive"] }
|
||||||
|
derive_more = "0.99"
|
||||||
|
faccess = "0.2"
|
||||||
|
futures-util = { version = "0.3", default-features = false, features = ["std"] }
|
||||||
|
home = "0.5"
|
||||||
|
jsonwebtoken = "9"
|
||||||
|
lazy_static = "1.4"
|
||||||
|
lexical-sort = "0.3"
|
||||||
|
local-ip-address = "0.6"
|
||||||
|
once_cell = "1.18"
|
||||||
|
parking_lot = "0.12"
|
||||||
|
path-clean = "1.0"
|
||||||
|
rand = "0.8"
|
||||||
|
regex = "1"
|
||||||
|
relative-path = "1.8"
|
||||||
|
reqwest = { version = "0.12", default-features = false, features = ["blocking", "json", "rustls-tls"] }
|
||||||
|
rpassword = "7.2"
|
||||||
|
sanitize-filename = "0.5"
|
||||||
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
|
serde_json = "1.0"
|
||||||
|
simplelog = { version = "0.12", features = ["paris"] }
|
||||||
|
static-files = "0.2"
|
||||||
|
sysinfo = { version = "0.30", features = ["linux-netdevs"] }
|
||||||
|
sqlx = { version = "0.7", features = ["runtime-tokio", "sqlite"] }
|
||||||
|
tokio = { version = "1.29", features = ["full"] }
|
||||||
|
tokio-stream = "0.1"
|
||||||
|
toml_edit = { version = "0.22", features = ["serde"] }
|
||||||
|
uuid = "1.8"
|
||||||
|
|
||||||
|
[build-dependencies]
|
||||||
|
static-files = "0.2"
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "ffpapi"
|
||||||
|
path = "src/main.rs"
|
63
ffplayout-api/README.md
Normal file
@ -0,0 +1,63 @@
|
|||||||
|
**ffplayout-api**
|
||||||
|
================
|
||||||
|
|
||||||
|
ffplayout-api (ffpapi) is a non-strict REST API for ffplayout. It makes it possible to control the engine, read and manipulate the config, save playlists, etc.
|
||||||
|
|
||||||
|
To be able to use the API it is necessary to initialize the settings database first. To do that, run:
|
||||||
|
|
||||||
|
```BASH
|
||||||
|
ffpapi -i
|
||||||
|
```
|
||||||
|
|
||||||
|
Then add an admin user:
|
||||||
|
|
||||||
|
```BASH
|
||||||
|
ffpapi -u <USERNAME> -p <PASSWORD> -m <MAIL ADDRESS>
|
||||||
|
```
|
||||||
|
|
||||||
|
Then run the API through the systemd service, or like:
|
||||||
|
|
||||||
|
```BASH
|
||||||
|
ffpapi -l 127.0.0.1:8787
|
||||||
|
```
|
||||||
|
|
||||||
|
Possible Arguments
|
||||||
|
-----
|
||||||
|
|
||||||
|
```BASH
|
||||||
|
OPTIONS:
|
||||||
|
-a, --ask ask for user credentials
|
||||||
|
-d, --domain <DOMAIN> domain name for initialization
|
||||||
|
-h, --help Print help information
|
||||||
|
-i, --init Initialize Database
|
||||||
|
-l, --listen <LISTEN> Listen on IP:PORT, like: 127.0.0.1:8787
|
||||||
|
-m, --mail <MAIL> Admin mail address
|
||||||
|
-p, --password <PASSWORD> Admin password
|
||||||
|
-u, --username <USERNAME> Create admin user
|
||||||
|
-V, --version Print version information
|
||||||
|
```
|
||||||
|
|
||||||
|
If you plan to run ffpapi with systemd set permission from **/usr/share/ffplayout** and content to user **ffpu:ffpu**. User **ffpu** has to be created.
|
||||||
|
|
||||||
|
**For possible endpoints read: [api endpoints](/docs/api.md)**
|
||||||
|
|
||||||
|
ffpapi can also serve the browser based frontend, just run in your browser `127.0.0.1:8787`.
|
||||||
|
|
||||||
|
"Piggyback" Mode
|
||||||
|
-----
|
||||||
|
|
||||||
|
ffplayout was originally planned to run under Linux as a SystemD service. It is also designed so that the engine and ffpapi run completely independently of each other. This is to increase flexibility and stability.
|
||||||
|
|
||||||
|
Nevertheless, programs compiled in Rust can basically run on all systems supported by the language. And so this repo also offers binaries for other platforms.
|
||||||
|
|
||||||
|
In the past, however, it was only possible under Linux to start/stop/restart the ffplayout engine process through ffpapi. This limit no longer exists since v0.17.0, because the "piggyback" mode was introduced here. This means that ffpapi recognizes which platform it is running on, and if it is not on Linux, it starts the engine as a child process. Thus it is now possible to control ffplayout engine completely on all platforms. The disadvantage here is that the engine process is dependent on ffpapi; if it closes or crashes, the engine also closes.
|
||||||
|
|
||||||
|
Under Linux, this mode can be simulated by starting ffpapi with the environment variable `PIGGYBACK_MODE=true`. This scenario is also conceivable in container operation, for example.
|
||||||
|
|
||||||
|
**Run in piggyback mode:**
|
||||||
|
|
||||||
|
```BASH
|
||||||
|
PIGGYBACK_MODE=True ffpapi -l 127.0.0.1:8787
|
||||||
|
```
|
||||||
|
|
||||||
|
This function is experimental; use it with caution.
|
@ -2,10 +2,10 @@ use static_files::NpmBuild;
|
|||||||
|
|
||||||
fn main() -> std::io::Result<()> {
|
fn main() -> std::io::Result<()> {
|
||||||
if !cfg!(debug_assertions) && cfg!(feature = "embed_frontend") {
|
if !cfg!(debug_assertions) && cfg!(feature = "embed_frontend") {
|
||||||
NpmBuild::new("../frontend")
|
NpmBuild::new("../ffplayout-frontend")
|
||||||
.install()?
|
.install()?
|
||||||
.run("generate")?
|
.run("generate")?
|
||||||
.target("../frontend/.output/public")
|
.target("../ffplayout-frontend/.output/public")
|
||||||
.change_detection()
|
.change_detection()
|
||||||
.to_resource_dir()
|
.to_resource_dir()
|
||||||
.build()
|
.build()
|
@ -4,10 +4,7 @@ use chrono::{TimeDelta, Utc};
|
|||||||
use jsonwebtoken::{self, DecodingKey, EncodingKey, Header, Validation};
|
use jsonwebtoken::{self, DecodingKey, EncodingKey, Header, Validation};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
use crate::{
|
use crate::utils::{GlobalSettings, Role};
|
||||||
db::models::{GlobalSettings, Role},
|
|
||||||
utils::errors::ServiceError,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Token lifetime
|
// Token lifetime
|
||||||
const JWT_EXPIRATION_DAYS: i64 = 7;
|
const JWT_EXPIRATION_DAYS: i64 = 7;
|
||||||
@ -15,17 +12,15 @@ const JWT_EXPIRATION_DAYS: i64 = 7;
|
|||||||
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
|
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
|
||||||
pub struct Claims {
|
pub struct Claims {
|
||||||
pub id: i32,
|
pub id: i32,
|
||||||
pub channels: Vec<i32>,
|
|
||||||
pub username: String,
|
pub username: String,
|
||||||
pub role: Role,
|
pub role: Role,
|
||||||
exp: i64,
|
exp: i64,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Claims {
|
impl Claims {
|
||||||
pub fn new(id: i32, channels: Vec<i32>, username: String, role: Role) -> Self {
|
pub fn new(id: i32, username: String, role: Role) -> Self {
|
||||||
Self {
|
Self {
|
||||||
id,
|
id,
|
||||||
channels,
|
|
||||||
username,
|
username,
|
||||||
role,
|
role,
|
||||||
exp: (Utc::now() + TimeDelta::try_days(JWT_EXPIRATION_DAYS).unwrap()).timestamp(),
|
exp: (Utc::now() + TimeDelta::try_days(JWT_EXPIRATION_DAYS).unwrap()).timestamp(),
|
||||||
@ -34,20 +29,17 @@ impl Claims {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Create a json web token (JWT)
|
/// Create a json web token (JWT)
|
||||||
pub async fn create_jwt(claims: Claims) -> Result<String, ServiceError> {
|
pub fn create_jwt(claims: Claims) -> Result<String, Error> {
|
||||||
let config = GlobalSettings::global();
|
let config = GlobalSettings::global();
|
||||||
let encoding_key = EncodingKey::from_secret(config.secret.clone().unwrap().as_bytes());
|
let encoding_key = EncodingKey::from_secret(config.secret.as_bytes());
|
||||||
Ok(jsonwebtoken::encode(
|
jsonwebtoken::encode(&Header::default(), &claims, &encoding_key)
|
||||||
&Header::default(),
|
.map_err(|e| ErrorUnauthorized(e.to_string()))
|
||||||
&claims,
|
|
||||||
&encoding_key,
|
|
||||||
)?)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Decode a json web token (JWT)
|
/// Decode a json web token (JWT)
|
||||||
pub async fn decode_jwt(token: &str) -> Result<Claims, Error> {
|
pub async fn decode_jwt(token: &str) -> Result<Claims, Error> {
|
||||||
let config = GlobalSettings::global();
|
let config = GlobalSettings::global();
|
||||||
let decoding_key = DecodingKey::from_secret(config.secret.clone().unwrap().as_bytes());
|
let decoding_key = DecodingKey::from_secret(config.secret.as_bytes());
|
||||||
jsonwebtoken::decode::<Claims>(token, &decoding_key, &Validation::default())
|
jsonwebtoken::decode::<Claims>(token, &decoding_key, &Validation::default())
|
||||||
.map(|data| data.claims)
|
.map(|data| data.claims)
|
||||||
.map_err(|e| ErrorUnauthorized(e.to_string()))
|
.map_err(|e| ErrorUnauthorized(e.to_string()))
|
352
ffplayout-api/src/db/handles.rs
Normal file
@ -0,0 +1,352 @@
|
|||||||
|
use std::env;
|
||||||
|
|
||||||
|
use argon2::{
|
||||||
|
password_hash::{rand_core::OsRng, SaltString},
|
||||||
|
Argon2, PasswordHasher,
|
||||||
|
};
|
||||||
|
|
||||||
|
use rand::{distributions::Alphanumeric, Rng};
|
||||||
|
use simplelog::*;
|
||||||
|
use sqlx::{migrate::MigrateDatabase, sqlite::SqliteQueryResult, Pool, Sqlite};
|
||||||
|
use tokio::task;
|
||||||
|
|
||||||
|
use crate::db::{
|
||||||
|
db_pool,
|
||||||
|
models::{Channel, TextPreset, User},
|
||||||
|
};
|
||||||
|
use crate::utils::{db_path, local_utc_offset, GlobalSettings, Role};
|
||||||
|
|
||||||
|
/// Create the initial sqlite schema: `global` (JWT secret), `roles`,
/// `channels`, `presets` (drawtext text presets) and `user` tables.
///
/// All statements use `IF NOT EXISTS`, so running this against an already
/// initialized database is a no-op. Foreign keys are enabled per-connection
/// via PRAGMA.
async fn create_schema(conn: &Pool<Sqlite>) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = "PRAGMA foreign_keys = ON;
    CREATE TABLE IF NOT EXISTS global
    (
        id                       INTEGER PRIMARY KEY AUTOINCREMENT,
        secret                   TEXT NOT NULL,
        UNIQUE(secret)
    );
    CREATE TABLE IF NOT EXISTS roles
    (
        id                       INTEGER PRIMARY KEY AUTOINCREMENT,
        name                     TEXT NOT NULL,
        UNIQUE(name)
    );
    CREATE TABLE IF NOT EXISTS channels
    (
        id                       INTEGER PRIMARY KEY AUTOINCREMENT,
        name                     TEXT NOT NULL,
        preview_url              TEXT NOT NULL,
        config_path              TEXT NOT NULL,
        extra_extensions         TEXT NOT NULL,
        service                  TEXT NOT NULL,
        UNIQUE(name, service)
    );
    CREATE TABLE IF NOT EXISTS presets
    (
        id                       INTEGER PRIMARY KEY AUTOINCREMENT,
        name                     TEXT NOT NULL,
        text                     TEXT NOT NULL,
        x                        TEXT NOT NULL,
        y                        TEXT NOT NULL,
        fontsize                 TEXT NOT NULL,
        line_spacing             TEXT NOT NULL,
        fontcolor                TEXT NOT NULL,
        box                      TEXT NOT NULL,
        boxcolor                 TEXT NOT NULL,
        boxborderw               TEXT NOT NULL,
        alpha                    TEXT NOT NULL,
        channel_id               INTEGER NOT NULL DEFAULT 1,
        FOREIGN KEY (channel_id) REFERENCES channels (id) ON UPDATE SET NULL ON DELETE SET NULL,
        UNIQUE(name)
    );
    CREATE TABLE IF NOT EXISTS user
    (
        id                       INTEGER PRIMARY KEY AUTOINCREMENT,
        mail                     TEXT NOT NULL,
        username                 TEXT NOT NULL,
        password                 TEXT NOT NULL,
        role_id                  INTEGER NOT NULL DEFAULT 2,
        channel_id               INTEGER NOT NULL DEFAULT 1,
        FOREIGN KEY (role_id)    REFERENCES roles (id) ON UPDATE SET NULL ON DELETE SET NULL,
        FOREIGN KEY (channel_id) REFERENCES channels (id) ON UPDATE SET NULL ON DELETE SET NULL,
        UNIQUE(mail, username)
    );";

    sqlx::query(query).execute(conn).await
}
|
||||||
|
|
||||||
|
pub async fn db_init(domain: Option<String>) -> Result<&'static str, Box<dyn std::error::Error>> {
|
||||||
|
let db_path = db_path()?;
|
||||||
|
|
||||||
|
if !Sqlite::database_exists(db_path).await.unwrap_or(false) {
|
||||||
|
Sqlite::create_database(db_path).await.unwrap();
|
||||||
|
|
||||||
|
let pool = db_pool().await?;
|
||||||
|
|
||||||
|
match create_schema(&pool).await {
|
||||||
|
Ok(_) => info!("Database created Successfully"),
|
||||||
|
Err(e) => panic!("{e}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let secret: String = rand::thread_rng()
|
||||||
|
.sample_iter(&Alphanumeric)
|
||||||
|
.take(80)
|
||||||
|
.map(char::from)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let url = match domain {
|
||||||
|
Some(d) => format!("http://{d}/live/stream.m3u8"),
|
||||||
|
None => "http://localhost/live/stream.m3u8".to_string(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let config_path = if env::consts::OS == "linux" {
|
||||||
|
"/etc/ffplayout/ffplayout.toml"
|
||||||
|
} else {
|
||||||
|
"./assets/ffplayout.toml"
|
||||||
|
};
|
||||||
|
|
||||||
|
let query = "CREATE TRIGGER global_row_count
|
||||||
|
BEFORE INSERT ON global
|
||||||
|
WHEN (SELECT COUNT(*) FROM global) >= 1
|
||||||
|
BEGIN
|
||||||
|
SELECT RAISE(FAIL, 'Database is already initialized!');
|
||||||
|
END;
|
||||||
|
INSERT INTO global(secret) VALUES($1);
|
||||||
|
INSERT INTO channels(name, preview_url, config_path, extra_extensions, service)
|
||||||
|
VALUES('Channel 1', $2, $3, 'jpg,jpeg,png', 'ffplayout.service');
|
||||||
|
INSERT INTO roles(name) VALUES('admin'), ('user'), ('guest');
|
||||||
|
INSERT INTO presets(name, text, x, y, fontsize, line_spacing, fontcolor, box, boxcolor, boxborderw, alpha, channel_id)
|
||||||
|
VALUES('Default', 'Wellcome to ffplayout messenger!', '(w-text_w)/2', '(h-text_h)/2', '24', '4', '#ffffff@0xff', '0', '#000000@0x80', '4', '1.0', '1'),
|
||||||
|
('Empty Text', '', '0', '0', '24', '4', '#000000', '0', '#000000', '0', '0', '1'),
|
||||||
|
('Bottom Text fade in', 'The upcoming event will be delayed by a few minutes.', '(w-text_w)/2', '(h-line_h)*0.9', '24', '4', '#ffffff',
|
||||||
|
'1', '#000000@0x80', '4', 'ifnot(ld(1),st(1,t));if(lt(t,ld(1)+1),0,if(lt(t,ld(1)+2),(t-(ld(1)+1))/1,if(lt(t,ld(1)+8),1,if(lt(t,ld(1)+9),(1-(t-(ld(1)+8)))/1,0))))', '1'),
|
||||||
|
('Scrolling Text', 'We have a very important announcement to make.', 'ifnot(ld(1),st(1,t));if(lt(t,ld(1)+1),w+4,w-w/12*mod(t-ld(1),12*(w+tw)/w))', '(h-line_h)*0.9',
|
||||||
|
'24', '4', '#ffffff', '1', '#000000@0x80', '4', '1.0', '1');";
|
||||||
|
|
||||||
|
let pool = db_pool().await?;
|
||||||
|
|
||||||
|
sqlx::query(query)
|
||||||
|
.bind(secret)
|
||||||
|
.bind(url)
|
||||||
|
.bind(config_path)
|
||||||
|
.execute(&pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok("Database initialized!")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Fetch the single global settings row (currently only the JWT secret).
pub async fn select_global(conn: &Pool<Sqlite>) -> Result<GlobalSettings, sqlx::Error> {
    let query = "SELECT secret FROM global WHERE id = 1";

    sqlx::query_as(query).fetch_one(conn).await
}
|
||||||
|
|
||||||
|
/// Fetch a single channel by id, filling in the caller-side `utc_offset`
/// (which is not stored in the database).
pub async fn select_channel(conn: &Pool<Sqlite>, id: &i32) -> Result<Channel, sqlx::Error> {
    let query = "SELECT * FROM channels WHERE id = $1";
    let mut result: Channel = sqlx::query_as(query).bind(id).fetch_one(conn).await?;

    // utc_offset is computed locally, not persisted.
    result.utc_offset = local_utc_offset();

    Ok(result)
}
|
||||||
|
|
||||||
|
/// Fetch all channels, filling in the locally computed `utc_offset` on each.
pub async fn select_all_channels(conn: &Pool<Sqlite>) -> Result<Vec<Channel>, sqlx::Error> {
    let query = "SELECT * FROM channels";
    let mut results: Vec<Channel> = sqlx::query_as(query).fetch_all(conn).await?;

    for result in results.iter_mut() {
        result.utc_offset = local_utc_offset();
    }

    Ok(results)
}
|
||||||
|
|
||||||
|
/// Update a channel's editable fields (name, preview URL, config path,
/// extra extensions); the `service` column is intentionally not updated.
pub async fn update_channel(
    conn: &Pool<Sqlite>,
    id: i32,
    channel: Channel,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = "UPDATE channels SET name = $2, preview_url = $3, config_path = $4, extra_extensions = $5 WHERE id = $1";

    sqlx::query(query)
        .bind(id)
        .bind(channel.name)
        .bind(channel.preview_url)
        .bind(channel.config_path)
        .bind(channel.extra_extensions)
        .execute(conn)
        .await
}
|
||||||
|
|
||||||
|
/// Insert a new channel and return the freshly stored row (re-selected via
/// the last insert rowid so auto-generated columns are populated).
pub async fn insert_channel(conn: &Pool<Sqlite>, channel: Channel) -> Result<Channel, sqlx::Error> {
    let query = "INSERT INTO channels (name, preview_url, config_path, extra_extensions, service) VALUES($1, $2, $3, $4, $5)";
    let result = sqlx::query(query)
        .bind(channel.name)
        .bind(channel.preview_url)
        .bind(channel.config_path)
        .bind(channel.extra_extensions)
        .bind(channel.service)
        .execute(conn)
        .await?;

    sqlx::query_as("SELECT * FROM channels WHERE id = $1")
        .bind(result.last_insert_rowid())
        .fetch_one(conn)
        .await
}
|
||||||
|
|
||||||
|
/// Delete a channel row by id.
pub async fn delete_channel(
    conn: &Pool<Sqlite>,
    id: &i32,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = "DELETE FROM channels WHERE id = $1";

    sqlx::query(query).bind(id).execute(conn).await
}
|
||||||
|
|
||||||
|
/// Return the highest (most recently created) channel id.
pub async fn select_last_channel(conn: &Pool<Sqlite>) -> Result<i32, sqlx::Error> {
    let query = "SELECT id FROM channels ORDER BY id DESC LIMIT 1;";

    sqlx::query_scalar(query).fetch_one(conn).await
}
|
||||||
|
|
||||||
|
/// Look up a role name by its id.
pub async fn select_role(conn: &Pool<Sqlite>, id: &i32) -> Result<Role, sqlx::Error> {
    let query = "SELECT name FROM roles WHERE id = $1";
    let result: Role = sqlx::query_as(query).bind(id).fetch_one(conn).await?;

    Ok(result)
}
|
||||||
|
|
||||||
|
/// Fetch a user by name INCLUDING the password hash — for login
/// verification only; use `select_user` for everything else.
pub async fn select_login(conn: &Pool<Sqlite>, user: &str) -> Result<User, sqlx::Error> {
    let query = "SELECT id, mail, username, password, role_id FROM user WHERE username = $1";

    sqlx::query_as(query).bind(user).fetch_one(conn).await
}
|
||||||
|
|
||||||
|
/// Fetch a user by name WITHOUT the password hash.
pub async fn select_user(conn: &Pool<Sqlite>, user: &str) -> Result<User, sqlx::Error> {
    let query = "SELECT id, mail, username, role_id FROM user WHERE username = $1";

    sqlx::query_as(query).bind(user).fetch_one(conn).await
}
|
||||||
|
|
||||||
|
/// Fetch a user by id WITHOUT the password hash.
pub async fn select_user_by_id(conn: &Pool<Sqlite>, id: i32) -> Result<User, sqlx::Error> {
    let query = "SELECT id, mail, username, role_id FROM user WHERE id = $1";

    sqlx::query_as(query).bind(id).fetch_one(conn).await
}
|
||||||
|
|
||||||
|
/// List all users (id and username only).
pub async fn select_users(conn: &Pool<Sqlite>) -> Result<Vec<User>, sqlx::Error> {
    let query = "SELECT id, username FROM user";

    sqlx::query_as(query).fetch_all(conn).await
}
|
||||||
|
|
||||||
|
/// Insert a new user, hashing the password with Argon2 (random salt) on a
/// blocking thread so the async executor is not stalled by key derivation.
pub async fn insert_user(
    conn: &Pool<Sqlite>,
    user: User,
) -> Result<SqliteQueryResult, sqlx::Error> {
    // Argon2 hashing is CPU-bound; run it via spawn_blocking.
    let password_hash = task::spawn_blocking(move || {
        let salt = SaltString::generate(&mut OsRng);
        // NOTE(review): the clone() looks redundant — as_bytes() only
        // borrows; also hash_password's unwrap panics on hashing failure.
        let hash = Argon2::default()
            .hash_password(user.password.clone().as_bytes(), &salt)
            .unwrap();

        hash.to_string()
    })
    .await
    .unwrap();

    let query = "INSERT INTO user (mail, username, password, role_id) VALUES($1, $2, $3, $4)";

    sqlx::query(query)
        .bind(user.mail)
        .bind(user.username)
        .bind(password_hash)
        .bind(user.role_id)
        .execute(conn)
        .await
}
|
||||||
|
|
||||||
|
/// Update arbitrary columns of a user row; `fields` is a pre-built
/// `col = value, …` SQL fragment.
///
/// NOTE(review): `fields` is interpolated directly into the SQL statement —
/// this is only safe while every caller builds it from trusted, internal
/// values. Verify no user-controlled input ever reaches this parameter.
pub async fn update_user(
    conn: &Pool<Sqlite>,
    id: i32,
    fields: String,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = format!("UPDATE user SET {fields} WHERE id = $1");

    sqlx::query(&query).bind(id).execute(conn).await
}
|
||||||
|
|
||||||
|
/// Delete a user row by username.
pub async fn delete_user(
    conn: &Pool<Sqlite>,
    name: &str,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query = "DELETE FROM user WHERE username = $1;";

    sqlx::query(query).bind(name).execute(conn).await
}
|
||||||
|
|
||||||
|
/// Fetch all text presets belonging to a channel.
pub async fn select_presets(conn: &Pool<Sqlite>, id: i32) -> Result<Vec<TextPreset>, sqlx::Error> {
    let query = "SELECT * FROM presets WHERE channel_id = $1";

    sqlx::query_as(query).bind(id).fetch_all(conn).await
}
|
||||||
|
|
||||||
|
/// Update an existing text preset by id; `channel_id` is not changed.
pub async fn update_preset(
    conn: &Pool<Sqlite>,
    id: &i32,
    preset: TextPreset,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query =
        "UPDATE presets SET name = $1, text = $2, x = $3, y = $4, fontsize = $5, line_spacing = $6,
        fontcolor = $7, alpha = $8, box = $9, boxcolor = $10, boxborderw = $11 WHERE id = $12";

    sqlx::query(query)
        .bind(preset.name)
        .bind(preset.text)
        .bind(preset.x)
        .bind(preset.y)
        .bind(preset.fontsize)
        .bind(preset.line_spacing)
        .bind(preset.fontcolor)
        .bind(preset.alpha)
        .bind(preset.r#box)
        .bind(preset.boxcolor)
        .bind(preset.boxborderw)
        .bind(id)
        .execute(conn)
        .await
}
|
||||||
|
|
||||||
|
/// Insert a new text preset for a channel.
pub async fn insert_preset(
    conn: &Pool<Sqlite>,
    preset: TextPreset,
) -> Result<SqliteQueryResult, sqlx::Error> {
    let query =
        "INSERT INTO presets (channel_id, name, text, x, y, fontsize, line_spacing, fontcolor, alpha, box, boxcolor, boxborderw)
            VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)";

    sqlx::query(query)
        .bind(preset.channel_id)
        .bind(preset.name)
        .bind(preset.text)
        .bind(preset.x)
        .bind(preset.y)
        .bind(preset.fontsize)
        .bind(preset.line_spacing)
        .bind(preset.fontcolor)
        .bind(preset.alpha)
        .bind(preset.r#box)
        .bind(preset.boxcolor)
        .bind(preset.boxborderw)
        .execute(conn)
        .await
}
|
||||||
|
|
||||||
|
pub async fn delete_preset(
|
||||||
|
conn: &Pool<Sqlite>,
|
||||||
|
id: &i32,
|
||||||
|
) -> Result<SqliteQueryResult, sqlx::Error> {
|
||||||
|
let query = "DELETE FROM presets WHERE id = $1;";
|
||||||
|
|
||||||
|
sqlx::query(query).bind(id).execute(conn).await
|
||||||
|
}
|
13
ffplayout-api/src/db/mod.rs
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
use sqlx::{Pool, Sqlite, SqlitePool};
|
||||||
|
|
||||||
|
pub mod handles;
|
||||||
|
pub mod models;
|
||||||
|
|
||||||
|
use crate::utils::db_path;
|
||||||
|
|
||||||
|
pub async fn db_pool() -> Result<Pool<Sqlite>, sqlx::Error> {
|
||||||
|
let db_path = db_path().unwrap();
|
||||||
|
let conn = SqlitePool::connect(db_path).await?;
|
||||||
|
|
||||||
|
Ok(conn)
|
||||||
|
}
|
118
ffplayout-api/src/db/models.rs
Normal file
@ -0,0 +1,118 @@
|
|||||||
|
use regex::Regex;
|
||||||
|
use serde::{
|
||||||
|
de::{self, Visitor},
|
||||||
|
Deserialize, Serialize,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize, Serialize, sqlx::FromRow)]
|
||||||
|
pub struct User {
|
||||||
|
#[sqlx(default)]
|
||||||
|
#[serde(skip_deserializing)]
|
||||||
|
pub id: i32,
|
||||||
|
#[sqlx(default)]
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub mail: Option<String>,
|
||||||
|
pub username: String,
|
||||||
|
#[sqlx(default)]
|
||||||
|
#[serde(skip_serializing, default = "empty_string")]
|
||||||
|
pub password: String,
|
||||||
|
#[sqlx(default)]
|
||||||
|
#[serde(skip_serializing)]
|
||||||
|
pub role_id: Option<i32>,
|
||||||
|
#[sqlx(default)]
|
||||||
|
#[serde(skip_serializing)]
|
||||||
|
pub channel_id: Option<i32>,
|
||||||
|
#[sqlx(default)]
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub token: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn empty_string() -> String {
|
||||||
|
"".to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize, Serialize, Clone)]
|
||||||
|
pub struct LoginUser {
|
||||||
|
pub id: i32,
|
||||||
|
pub username: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl LoginUser {
|
||||||
|
pub fn new(id: i32, username: String) -> Self {
|
||||||
|
Self { id, username }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize, Serialize, Clone, sqlx::FromRow)]
|
||||||
|
pub struct TextPreset {
|
||||||
|
#[sqlx(default)]
|
||||||
|
#[serde(skip_deserializing)]
|
||||||
|
pub id: i32,
|
||||||
|
pub channel_id: i32,
|
||||||
|
pub name: String,
|
||||||
|
pub text: String,
|
||||||
|
pub x: String,
|
||||||
|
pub y: String,
|
||||||
|
#[serde(deserialize_with = "deserialize_number_or_string")]
|
||||||
|
pub fontsize: String,
|
||||||
|
#[serde(deserialize_with = "deserialize_number_or_string")]
|
||||||
|
pub line_spacing: String,
|
||||||
|
pub fontcolor: String,
|
||||||
|
pub r#box: String,
|
||||||
|
pub boxcolor: String,
|
||||||
|
#[serde(deserialize_with = "deserialize_number_or_string")]
|
||||||
|
pub boxborderw: String,
|
||||||
|
#[serde(deserialize_with = "deserialize_number_or_string")]
|
||||||
|
pub alpha: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Deserialize number or string
|
||||||
|
pub fn deserialize_number_or_string<'de, D>(deserializer: D) -> Result<String, D::Error>
|
||||||
|
where
|
||||||
|
D: serde::Deserializer<'de>,
|
||||||
|
{
|
||||||
|
struct StringOrNumberVisitor;
|
||||||
|
|
||||||
|
impl<'de> Visitor<'de> for StringOrNumberVisitor {
|
||||||
|
type Value = String;
|
||||||
|
|
||||||
|
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||||
|
formatter.write_str("a string or a number")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn visit_str<E: de::Error>(self, value: &str) -> Result<Self::Value, E> {
|
||||||
|
let re = Regex::new(r"0,([0-9]+)").unwrap();
|
||||||
|
let clean_string = re.replace_all(value, "0.$1").to_string();
|
||||||
|
Ok(clean_string)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn visit_u64<E: de::Error>(self, value: u64) -> Result<Self::Value, E> {
|
||||||
|
Ok(value.to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn visit_i64<E: de::Error>(self, value: i64) -> Result<Self::Value, E> {
|
||||||
|
Ok(value.to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn visit_f64<E: de::Error>(self, value: f64) -> Result<Self::Value, E> {
|
||||||
|
Ok(value.to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
deserializer.deserialize_any(StringOrNumberVisitor)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize, Serialize, sqlx::FromRow)]
|
||||||
|
pub struct Channel {
|
||||||
|
#[serde(skip_deserializing)]
|
||||||
|
pub id: i32,
|
||||||
|
pub name: String,
|
||||||
|
pub preview_url: String,
|
||||||
|
pub config_path: String,
|
||||||
|
pub extra_extensions: String,
|
||||||
|
pub service: String,
|
||||||
|
|
||||||
|
#[sqlx(default)]
|
||||||
|
#[serde(default)]
|
||||||
|
pub utc_offset: i32,
|
||||||
|
}
|
21
ffplayout-api/src/lib.rs
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
use std::sync::{Arc, Mutex};
|
||||||
|
|
||||||
|
use clap::Parser;
|
||||||
|
use lazy_static::lazy_static;
|
||||||
|
use sysinfo::{Disks, Networks, System};
|
||||||
|
|
||||||
|
pub mod api;
|
||||||
|
pub mod db;
|
||||||
|
pub mod sse;
|
||||||
|
pub mod utils;
|
||||||
|
|
||||||
|
use utils::args_parse::Args;
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
pub static ref ARGS: Args = Args::parse();
|
||||||
|
pub static ref DISKS: Arc<Mutex<Disks>> =
|
||||||
|
Arc::new(Mutex::new(Disks::new_with_refreshed_list()));
|
||||||
|
pub static ref NETWORKS: Arc<Mutex<Networks>> =
|
||||||
|
Arc::new(Mutex::new(Networks::new_with_refreshed_list()));
|
||||||
|
pub static ref SYS: Arc<Mutex<System>> = Arc::new(Mutex::new(System::new_all()));
|
||||||
|
}
|
194
ffplayout-api/src/main.rs
Normal file
@ -0,0 +1,194 @@
|
|||||||
|
use std::{collections::HashSet, env, process::exit, sync::Arc};
|
||||||
|
|
||||||
|
use actix_files::Files;
|
||||||
|
use actix_web::{
|
||||||
|
dev::ServiceRequest, middleware::Logger, web, App, Error, HttpMessage, HttpServer,
|
||||||
|
};
|
||||||
|
use actix_web_grants::authorities::AttachAuthorities;
|
||||||
|
use actix_web_httpauth::{extractors::bearer::BearerAuth, middleware::HttpAuthentication};
|
||||||
|
|
||||||
|
#[cfg(all(not(debug_assertions), feature = "embed_frontend"))]
|
||||||
|
use actix_web_static_files::ResourceFiles;
|
||||||
|
|
||||||
|
use path_clean::PathClean;
|
||||||
|
use simplelog::*;
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
|
||||||
|
use ffplayout_api::{
|
||||||
|
api::{auth, routes::*},
|
||||||
|
db::{db_pool, models::LoginUser},
|
||||||
|
sse::{broadcast::Broadcaster, routes::*, AuthState},
|
||||||
|
utils::{control::ProcessControl, db_path, init_config, run_args},
|
||||||
|
ARGS,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[cfg(any(debug_assertions, not(feature = "embed_frontend")))]
|
||||||
|
use ffplayout_api::utils::public_path;
|
||||||
|
|
||||||
|
use ffplayout_lib::utils::{init_logging, PlayoutConfig};
|
||||||
|
|
||||||
|
#[cfg(all(not(debug_assertions), feature = "embed_frontend"))]
|
||||||
|
include!(concat!(env!("OUT_DIR"), "/generated.rs"));
|
||||||
|
|
||||||
|
async fn validator(
|
||||||
|
req: ServiceRequest,
|
||||||
|
credentials: BearerAuth,
|
||||||
|
) -> Result<ServiceRequest, (Error, ServiceRequest)> {
|
||||||
|
// We just get permissions from JWT
|
||||||
|
match auth::decode_jwt(credentials.token()).await {
|
||||||
|
Ok(claims) => {
|
||||||
|
req.attach(vec![claims.role]);
|
||||||
|
|
||||||
|
req.extensions_mut()
|
||||||
|
.insert(LoginUser::new(claims.id, claims.username));
|
||||||
|
|
||||||
|
Ok(req)
|
||||||
|
}
|
||||||
|
Err(e) => Err((e, req)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_web::main]
|
||||||
|
async fn main() -> std::io::Result<()> {
|
||||||
|
let mut config = PlayoutConfig::new(None, None);
|
||||||
|
config.mail.recipient = String::new();
|
||||||
|
config.logging.log_to_file = false;
|
||||||
|
config.logging.timestamp = false;
|
||||||
|
|
||||||
|
let logging = init_logging(&config, None, None);
|
||||||
|
CombinedLogger::init(logging).unwrap();
|
||||||
|
|
||||||
|
if let Err(c) = run_args().await {
|
||||||
|
exit(c);
|
||||||
|
}
|
||||||
|
|
||||||
|
let pool = match db_pool().await {
|
||||||
|
Ok(p) => p,
|
||||||
|
Err(e) => {
|
||||||
|
error!("{e}");
|
||||||
|
exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(conn) = &ARGS.listen {
|
||||||
|
if db_path().is_err() {
|
||||||
|
error!("Database is not initialized! Init DB first and add admin user.");
|
||||||
|
exit(1);
|
||||||
|
}
|
||||||
|
init_config(&pool).await;
|
||||||
|
let ip_port = conn.split(':').collect::<Vec<&str>>();
|
||||||
|
let addr = ip_port[0];
|
||||||
|
let port = ip_port[1].parse::<u16>().unwrap();
|
||||||
|
let engine_process = web::Data::new(ProcessControl::new());
|
||||||
|
let auth_state = web::Data::new(AuthState {
|
||||||
|
uuids: Mutex::new(HashSet::new()),
|
||||||
|
});
|
||||||
|
let broadcast_data = Broadcaster::create();
|
||||||
|
|
||||||
|
info!("running ffplayout API, listen on http://{conn}");
|
||||||
|
|
||||||
|
// no 'allow origin' here, give it to the reverse proxy
|
||||||
|
HttpServer::new(move || {
|
||||||
|
let auth = HttpAuthentication::bearer(validator);
|
||||||
|
let db_pool = web::Data::new(pool.clone());
|
||||||
|
// Customize logging format to get IP though proxies.
|
||||||
|
let logger = Logger::new("%{r}a \"%r\" %s %b \"%{Referer}i\" \"%{User-Agent}i\" %T")
|
||||||
|
.exclude_regex(r"/_nuxt/*");
|
||||||
|
|
||||||
|
let mut web_app = App::new()
|
||||||
|
.app_data(db_pool)
|
||||||
|
.app_data(engine_process.clone())
|
||||||
|
.app_data(auth_state.clone())
|
||||||
|
.app_data(web::Data::from(Arc::clone(&broadcast_data)))
|
||||||
|
.wrap(logger)
|
||||||
|
.service(login)
|
||||||
|
.service(
|
||||||
|
web::scope("/api")
|
||||||
|
.wrap(auth.clone())
|
||||||
|
.service(add_user)
|
||||||
|
.service(get_user)
|
||||||
|
.service(get_by_name)
|
||||||
|
.service(get_users)
|
||||||
|
.service(remove_user)
|
||||||
|
.service(get_playout_config)
|
||||||
|
.service(update_playout_config)
|
||||||
|
.service(add_preset)
|
||||||
|
.service(get_presets)
|
||||||
|
.service(update_preset)
|
||||||
|
.service(delete_preset)
|
||||||
|
.service(get_channel)
|
||||||
|
.service(get_all_channels)
|
||||||
|
.service(patch_channel)
|
||||||
|
.service(add_channel)
|
||||||
|
.service(remove_channel)
|
||||||
|
.service(update_user)
|
||||||
|
.service(send_text_message)
|
||||||
|
.service(control_playout)
|
||||||
|
.service(media_current)
|
||||||
|
.service(media_next)
|
||||||
|
.service(media_last)
|
||||||
|
.service(process_control)
|
||||||
|
.service(get_playlist)
|
||||||
|
.service(save_playlist)
|
||||||
|
.service(gen_playlist)
|
||||||
|
.service(del_playlist)
|
||||||
|
.service(get_log)
|
||||||
|
.service(file_browser)
|
||||||
|
.service(add_dir)
|
||||||
|
.service(move_rename)
|
||||||
|
.service(remove)
|
||||||
|
.service(save_file)
|
||||||
|
.service(import_playlist)
|
||||||
|
.service(get_program)
|
||||||
|
.service(get_system_stat)
|
||||||
|
.service(generate_uuid),
|
||||||
|
)
|
||||||
|
.service(
|
||||||
|
web::scope("/data")
|
||||||
|
.service(validate_uuid)
|
||||||
|
.service(event_stream),
|
||||||
|
)
|
||||||
|
.service(get_file);
|
||||||
|
|
||||||
|
if let Some(public) = &ARGS.public {
|
||||||
|
// When public path is set as argument use this path for serving extra static files,
|
||||||
|
// is useful for HLS stream etc.
|
||||||
|
let absolute_path = if public.is_absolute() {
|
||||||
|
public.to_path_buf()
|
||||||
|
} else {
|
||||||
|
env::current_dir().unwrap_or_default().join(public)
|
||||||
|
}
|
||||||
|
.clean();
|
||||||
|
|
||||||
|
web_app = web_app.service(Files::new("/", absolute_path));
|
||||||
|
} else {
|
||||||
|
// When no public path is given as argument, use predefine keywords in path,
|
||||||
|
// like /live; /preview; /public, or HLS extensions to recognize file should get from public folder
|
||||||
|
web_app = web_app.service(get_public);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(all(not(debug_assertions), feature = "embed_frontend"))]
|
||||||
|
{
|
||||||
|
// in release mode embed frontend
|
||||||
|
let generated = generate();
|
||||||
|
web_app =
|
||||||
|
web_app.service(ResourceFiles::new("/", generated).resolve_not_found_to_root());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(any(debug_assertions, not(feature = "embed_frontend")))]
|
||||||
|
{
|
||||||
|
// in debug mode get frontend from path
|
||||||
|
web_app = web_app.service(Files::new("/", public_path()).index_file("index.html"));
|
||||||
|
}
|
||||||
|
|
||||||
|
web_app
|
||||||
|
})
|
||||||
|
.bind((addr, port))?
|
||||||
|
.run()
|
||||||
|
.await
|
||||||
|
} else {
|
||||||
|
error!("Run ffpapi with listen parameter!");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
@ -1,7 +1,4 @@
|
|||||||
use std::{
|
use std::{sync::Arc, time::Duration};
|
||||||
sync::{atomic::Ordering, Arc},
|
|
||||||
time::Duration,
|
|
||||||
};
|
|
||||||
|
|
||||||
use actix_web::{rt::time::interval, web};
|
use actix_web::{rt::time::interval, web};
|
||||||
use actix_web_lab::{
|
use actix_web_lab::{
|
||||||
@ -9,24 +6,31 @@ use actix_web_lab::{
|
|||||||
util::InfallibleStream,
|
util::InfallibleStream,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use ffplayout_lib::utils::PlayoutConfig;
|
||||||
use parking_lot::Mutex;
|
use parking_lot::Mutex;
|
||||||
use tokio::sync::mpsc;
|
use tokio::sync::mpsc;
|
||||||
use tokio_stream::wrappers::ReceiverStream;
|
use tokio_stream::wrappers::ReceiverStream;
|
||||||
|
|
||||||
use crate::player::{controller::ChannelManager, utils::get_data_map};
|
use crate::utils::{control::media_info, system};
|
||||||
use crate::utils::system;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
struct Client {
|
struct Client {
|
||||||
manager: ChannelManager,
|
_channel: i32,
|
||||||
|
config: PlayoutConfig,
|
||||||
endpoint: String,
|
endpoint: String,
|
||||||
sender: mpsc::Sender<sse::Event>,
|
sender: mpsc::Sender<sse::Event>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Client {
|
impl Client {
|
||||||
fn new(manager: ChannelManager, endpoint: String, sender: mpsc::Sender<sse::Event>) -> Self {
|
fn new(
|
||||||
|
_channel: i32,
|
||||||
|
config: PlayoutConfig,
|
||||||
|
endpoint: String,
|
||||||
|
sender: mpsc::Sender<sse::Event>,
|
||||||
|
) -> Self {
|
||||||
Self {
|
Self {
|
||||||
manager,
|
_channel,
|
||||||
|
config,
|
||||||
endpoint,
|
endpoint,
|
||||||
sender,
|
sender,
|
||||||
}
|
}
|
||||||
@ -99,7 +103,8 @@ impl Broadcaster {
|
|||||||
/// Registers client with broadcaster, returning an SSE response body.
|
/// Registers client with broadcaster, returning an SSE response body.
|
||||||
pub async fn new_client(
|
pub async fn new_client(
|
||||||
&self,
|
&self,
|
||||||
manager: ChannelManager,
|
channel: i32,
|
||||||
|
config: PlayoutConfig,
|
||||||
endpoint: String,
|
endpoint: String,
|
||||||
) -> Sse<InfallibleStream<ReceiverStream<sse::Event>>> {
|
) -> Sse<InfallibleStream<ReceiverStream<sse::Event>>> {
|
||||||
let (tx, rx) = mpsc::channel(10);
|
let (tx, rx) = mpsc::channel(10);
|
||||||
@ -109,7 +114,7 @@ impl Broadcaster {
|
|||||||
self.inner
|
self.inner
|
||||||
.lock()
|
.lock()
|
||||||
.clients
|
.clients
|
||||||
.push(Client::new(manager, endpoint, tx));
|
.push(Client::new(channel, config, endpoint, tx));
|
||||||
|
|
||||||
Sse::from_infallible_receiver(rx)
|
Sse::from_infallible_receiver(rx)
|
||||||
}
|
}
|
||||||
@ -119,22 +124,23 @@ impl Broadcaster {
|
|||||||
let clients = self.inner.lock().clients.clone();
|
let clients = self.inner.lock().clients.clone();
|
||||||
|
|
||||||
for client in clients.iter().filter(|client| client.endpoint == "playout") {
|
for client in clients.iter().filter(|client| client.endpoint == "playout") {
|
||||||
let media_map = get_data_map(&client.manager);
|
match media_info(&client.config, "current".into()).await {
|
||||||
|
Ok(res) => {
|
||||||
if client.manager.is_alive.load(Ordering::SeqCst) {
|
let _ = client
|
||||||
let _ = client
|
.sender
|
||||||
.sender
|
.send(
|
||||||
.send(
|
sse::Data::new(res.text().await.unwrap_or_else(|_| "Success".into()))
|
||||||
sse::Data::new(serde_json::to_string(&media_map).unwrap_or_default())
|
.into(),
|
||||||
.into(),
|
)
|
||||||
)
|
.await;
|
||||||
.await;
|
}
|
||||||
} else {
|
Err(_) => {
|
||||||
let _ = client
|
let _ = client
|
||||||
.sender
|
.sender
|
||||||
.send(sse::Data::new("not running").into())
|
.send(sse::Data::new("not running").into())
|
||||||
.await;
|
.await;
|
||||||
}
|
}
|
||||||
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -144,8 +150,7 @@ impl Broadcaster {
|
|||||||
|
|
||||||
for client in clients {
|
for client in clients {
|
||||||
if &client.endpoint == "system" {
|
if &client.endpoint == "system" {
|
||||||
let config = client.manager.config.lock().unwrap().clone();
|
if let Ok(stat) = web::block(move || system::stat(client.config.clone())).await {
|
||||||
if let Ok(stat) = web::block(move || system::stat(config.clone())).await {
|
|
||||||
let stat_string = stat.to_string();
|
let stat_string = stat.to_string();
|
||||||
let _ = client.sender.send(sse::Data::new(stat_string).into()).await;
|
let _ = client.sender.send(sse::Data::new(stat_string).into()).await;
|
||||||
};
|
};
|
@ -32,7 +32,7 @@ impl Default for UuidData {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct SseAuthState {
|
pub struct AuthState {
|
||||||
pub uuids: Mutex<HashSet<UuidData>>,
|
pub uuids: Mutex<HashSet<UuidData>>,
|
||||||
}
|
}
|
||||||
|
|
@ -1,14 +1,11 @@
|
|||||||
use std::sync::Mutex;
|
|
||||||
|
|
||||||
use actix_web::{get, post, web, Responder};
|
use actix_web::{get, post, web, Responder};
|
||||||
use actix_web_grants::proc_macro::protect;
|
use actix_web_grants::proc_macro::protect;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
use sqlx::{Pool, Sqlite};
|
||||||
|
|
||||||
use super::{check_uuid, prune_uuids, SseAuthState, UuidData};
|
use super::{check_uuid, prune_uuids, AuthState, UuidData};
|
||||||
use crate::db::models::Role;
|
|
||||||
use crate::player::controller::ChannelController;
|
|
||||||
use crate::sse::broadcast::Broadcaster;
|
use crate::sse::broadcast::Broadcaster;
|
||||||
use crate::utils::errors::ServiceError;
|
use crate::utils::{errors::ServiceError, playout_config, Role};
|
||||||
|
|
||||||
#[derive(Deserialize, Serialize)]
|
#[derive(Deserialize, Serialize)]
|
||||||
struct User {
|
struct User {
|
||||||
@ -29,11 +26,8 @@ impl User {
|
|||||||
/// curl -X GET 'http://127.0.0.1:8787/api/generate-uuid' -H 'Authorization: Bearer <TOKEN>'
|
/// curl -X GET 'http://127.0.0.1:8787/api/generate-uuid' -H 'Authorization: Bearer <TOKEN>'
|
||||||
/// ```
|
/// ```
|
||||||
#[post("/generate-uuid")]
|
#[post("/generate-uuid")]
|
||||||
#[protect(
|
#[protect(any("Role::Admin", "Role::User"), ty = "Role")]
|
||||||
any("Role::GlobalAdmin", "Role::ChannelAdmin", "Role::User"),
|
async fn generate_uuid(data: web::Data<AuthState>) -> Result<impl Responder, ServiceError> {
|
||||||
ty = "Role"
|
|
||||||
)]
|
|
||||||
async fn generate_uuid(data: web::Data<SseAuthState>) -> Result<impl Responder, ServiceError> {
|
|
||||||
let mut uuids = data.uuids.lock().await;
|
let mut uuids = data.uuids.lock().await;
|
||||||
let new_uuid = UuidData::new();
|
let new_uuid = UuidData::new();
|
||||||
let user_auth = User::new(String::new(), new_uuid.uuid.to_string());
|
let user_auth = User::new(String::new(), new_uuid.uuid.to_string());
|
||||||
@ -52,7 +46,7 @@ async fn generate_uuid(data: web::Data<SseAuthState>) -> Result<impl Responder,
|
|||||||
/// ```
|
/// ```
|
||||||
#[get("/validate")]
|
#[get("/validate")]
|
||||||
async fn validate_uuid(
|
async fn validate_uuid(
|
||||||
data: web::Data<SseAuthState>,
|
data: web::Data<AuthState>,
|
||||||
user: web::Query<User>,
|
user: web::Query<User>,
|
||||||
) -> Result<impl Responder, ServiceError> {
|
) -> Result<impl Responder, ServiceError> {
|
||||||
let mut uuids = data.uuids.lock().await;
|
let mut uuids = data.uuids.lock().await;
|
||||||
@ -68,21 +62,21 @@ async fn validate_uuid(
|
|||||||
/// ```BASH
|
/// ```BASH
|
||||||
/// curl -X GET 'http://127.0.0.1:8787/data/event/1?endpoint=system&uuid=f2f8c29b-712a-48c5-8919-b535d3a05a3a'
|
/// curl -X GET 'http://127.0.0.1:8787/data/event/1?endpoint=system&uuid=f2f8c29b-712a-48c5-8919-b535d3a05a3a'
|
||||||
/// ```
|
/// ```
|
||||||
#[get("/event/{id}")]
|
#[get("/event/{channel}")]
|
||||||
async fn event_stream(
|
async fn event_stream(
|
||||||
|
pool: web::Data<Pool<Sqlite>>,
|
||||||
broadcaster: web::Data<Broadcaster>,
|
broadcaster: web::Data<Broadcaster>,
|
||||||
data: web::Data<SseAuthState>,
|
data: web::Data<AuthState>,
|
||||||
id: web::Path<i32>,
|
id: web::Path<i32>,
|
||||||
user: web::Query<User>,
|
user: web::Query<User>,
|
||||||
controllers: web::Data<Mutex<ChannelController>>,
|
|
||||||
) -> Result<impl Responder, ServiceError> {
|
) -> Result<impl Responder, ServiceError> {
|
||||||
let mut uuids = data.uuids.lock().await;
|
let mut uuids = data.uuids.lock().await;
|
||||||
|
|
||||||
check_uuid(&mut uuids, user.uuid.as_str())?;
|
check_uuid(&mut uuids, user.uuid.as_str())?;
|
||||||
|
|
||||||
let manager = controllers.lock().unwrap().get(*id).unwrap();
|
let (config, _) = playout_config(&pool.clone().into_inner(), &id).await?;
|
||||||
|
|
||||||
Ok(broadcaster
|
Ok(broadcaster
|
||||||
.new_client(manager.clone(), user.endpoint.clone())
|
.new_client(*id, config, user.endpoint.clone())
|
||||||
.await)
|
.await)
|
||||||
}
|
}
|
36
ffplayout-api/src/utils/args_parse.rs
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use clap::Parser;
|
||||||
|
|
||||||
|
#[derive(Parser, Debug, Clone)]
|
||||||
|
#[clap(version,
|
||||||
|
about = "REST API for ffplayout",
|
||||||
|
long_about = None)]
|
||||||
|
pub struct Args {
|
||||||
|
#[clap(short, long, help = "ask for user credentials")]
|
||||||
|
pub ask: bool,
|
||||||
|
|
||||||
|
#[clap(long, help = "path to database file")]
|
||||||
|
pub db: Option<PathBuf>,
|
||||||
|
|
||||||
|
#[clap(long, help = "path to public files")]
|
||||||
|
pub public: Option<PathBuf>,
|
||||||
|
|
||||||
|
#[clap(short, long, help = "Listen on IP:PORT, like: 127.0.0.1:8787")]
|
||||||
|
pub listen: Option<String>,
|
||||||
|
|
||||||
|
#[clap(short, long, help = "Initialize Database")]
|
||||||
|
pub init: bool,
|
||||||
|
|
||||||
|
#[clap(short, long, help = "domain name for initialization")]
|
||||||
|
pub domain: Option<String>,
|
||||||
|
|
||||||
|
#[clap(short, long, help = "Create admin user")]
|
||||||
|
pub username: Option<String>,
|
||||||
|
|
||||||
|
#[clap(short, long, help = "Admin mail address")]
|
||||||
|
pub mail: Option<String>,
|
||||||
|
|
||||||
|
#[clap(short, long, help = "Admin password")]
|
||||||
|
pub password: Option<String>,
|
||||||
|
}
|
74
ffplayout-api/src/utils/channels.rs
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
use std::{fs, path::PathBuf};
|
||||||
|
|
||||||
|
use rand::prelude::*;
|
||||||
|
use simplelog::*;
|
||||||
|
use sqlx::{Pool, Sqlite};
|
||||||
|
|
||||||
|
use crate::utils::{
|
||||||
|
control::{control_service, ServiceCmd},
|
||||||
|
errors::ServiceError,
|
||||||
|
};
|
||||||
|
|
||||||
|
use ffplayout_lib::utils::PlayoutConfig;
|
||||||
|
|
||||||
|
use crate::db::{handles, models::Channel};
|
||||||
|
use crate::utils::playout_config;
|
||||||
|
|
||||||
|
pub async fn create_channel(
|
||||||
|
conn: &Pool<Sqlite>,
|
||||||
|
target_channel: Channel,
|
||||||
|
) -> Result<Channel, ServiceError> {
|
||||||
|
if !target_channel.service.starts_with("ffplayout@") {
|
||||||
|
return Err(ServiceError::BadRequest("Bad service name!".to_string()));
|
||||||
|
}
|
||||||
|
|
||||||
|
if !target_channel.config_path.starts_with("/etc/ffplayout") {
|
||||||
|
return Err(ServiceError::BadRequest("Bad config path!".to_string()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let channel_name = target_channel.name.to_lowercase().replace(' ', "");
|
||||||
|
let channel_num = match handles::select_last_channel(conn).await {
|
||||||
|
Ok(num) => num + 1,
|
||||||
|
Err(_) => rand::thread_rng().gen_range(71..99),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut config = PlayoutConfig::new(
|
||||||
|
Some(PathBuf::from("/usr/share/ffplayout/ffplayout.toml.orig")),
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
config.general.stat_file = format!(".ffp_{channel_name}",);
|
||||||
|
config.logging.path = config.logging.path.join(&channel_name);
|
||||||
|
config.rpc_server.address = format!("127.0.0.1:70{:7>2}", channel_num);
|
||||||
|
config.playlist.path = config.playlist.path.join(channel_name);
|
||||||
|
|
||||||
|
config.out.output_param = config
|
||||||
|
.out
|
||||||
|
.output_param
|
||||||
|
.replace("stream.m3u8", &format!("stream{channel_num}.m3u8"))
|
||||||
|
.replace("stream-%d.ts", &format!("stream{channel_num}-%d.ts"));
|
||||||
|
|
||||||
|
let toml_string = toml_edit::ser::to_string_pretty(&config)?;
|
||||||
|
fs::write(&target_channel.config_path, toml_string)?;
|
||||||
|
|
||||||
|
let new_channel = handles::insert_channel(conn, target_channel).await?;
|
||||||
|
control_service(conn, &config, new_channel.id, &ServiceCmd::Enable, None).await?;
|
||||||
|
|
||||||
|
Ok(new_channel)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn delete_channel(conn: &Pool<Sqlite>, id: i32) -> Result<(), ServiceError> {
|
||||||
|
let channel = handles::select_channel(conn, &id).await?;
|
||||||
|
let (config, _) = playout_config(conn, &id).await?;
|
||||||
|
|
||||||
|
control_service(conn, &config, channel.id, &ServiceCmd::Stop, None).await?;
|
||||||
|
control_service(conn, &config, channel.id, &ServiceCmd::Disable, None).await?;
|
||||||
|
|
||||||
|
if let Err(e) = fs::remove_file(channel.config_path) {
|
||||||
|
error!("{e}");
|
||||||
|
};
|
||||||
|
|
||||||
|
handles::delete_channel(conn, &id).await?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
345
ffplayout-api/src/utils/control.rs
Normal file
@ -0,0 +1,345 @@
|
|||||||
|
use std::{
|
||||||
|
collections::HashMap,
|
||||||
|
env, fmt,
|
||||||
|
str::FromStr,
|
||||||
|
sync::atomic::{AtomicBool, Ordering},
|
||||||
|
};
|
||||||
|
|
||||||
|
use actix_web::web;
|
||||||
|
use reqwest::{header::AUTHORIZATION, Client, Response};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use sqlx::{Pool, Sqlite};
|
||||||
|
use tokio::{
|
||||||
|
process::{Child, Command},
|
||||||
|
sync::Mutex,
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::db::handles::select_channel;
|
||||||
|
use crate::utils::errors::ServiceError;
|
||||||
|
use ffplayout_lib::{utils::PlayoutConfig, vec_strings};
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize, Serialize, Clone)]
|
||||||
|
struct TextParams {
|
||||||
|
control: String,
|
||||||
|
message: HashMap<String, String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize, Serialize, Clone)]
|
||||||
|
pub struct ControlParams {
|
||||||
|
pub control: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize, Serialize, Clone)]
|
||||||
|
struct MediaParams {
|
||||||
|
media: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// ffplayout engine process
|
||||||
|
///
|
||||||
|
/// When running not on Linux, or with environment variable `PIGGYBACK_MODE=true`,
|
||||||
|
/// the engine get startet and controlled from ffpapi
|
||||||
|
pub struct ProcessControl {
|
||||||
|
pub engine_child: Mutex<Option<Child>>,
|
||||||
|
pub is_running: AtomicBool,
|
||||||
|
pub piggyback: AtomicBool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ProcessControl {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
let piggyback = if env::consts::OS != "linux" || env::var("PIGGYBACK_MODE").is_ok() {
|
||||||
|
AtomicBool::new(true)
|
||||||
|
} else {
|
||||||
|
AtomicBool::new(false)
|
||||||
|
};
|
||||||
|
|
||||||
|
Self {
|
||||||
|
engine_child: Mutex::new(None),
|
||||||
|
is_running: AtomicBool::new(false),
|
||||||
|
piggyback,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ProcessControl {
|
||||||
|
pub async fn start(&self) -> Result<String, ServiceError> {
|
||||||
|
#[cfg(not(debug_assertions))]
|
||||||
|
let engine_path = "ffplayout";
|
||||||
|
|
||||||
|
#[cfg(debug_assertions)]
|
||||||
|
let engine_path = "./target/debug/ffplayout";
|
||||||
|
|
||||||
|
match Command::new(engine_path).kill_on_drop(true).spawn() {
|
||||||
|
Ok(proc) => *self.engine_child.lock().await = Some(proc),
|
||||||
|
Err(_) => return Err(ServiceError::InternalServerError),
|
||||||
|
};
|
||||||
|
|
||||||
|
self.is_running.store(true, Ordering::SeqCst);
|
||||||
|
|
||||||
|
Ok("Success".to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn stop(&self) -> Result<String, ServiceError> {
|
||||||
|
if let Some(proc) = self.engine_child.lock().await.as_mut() {
|
||||||
|
if proc.kill().await.is_err() {
|
||||||
|
return Err(ServiceError::InternalServerError);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
self.wait().await?;
|
||||||
|
self.is_running.store(false, Ordering::SeqCst);
|
||||||
|
|
||||||
|
Ok("Success".to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn restart(&self) -> Result<String, ServiceError> {
|
||||||
|
self.stop().await?;
|
||||||
|
self.start().await?;
|
||||||
|
|
||||||
|
self.is_running.store(true, Ordering::SeqCst);
|
||||||
|
|
||||||
|
Ok("Success".to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Wait for process to proper close.
|
||||||
|
/// This prevents orphaned/zombi processes in system
|
||||||
|
pub async fn wait(&self) -> Result<String, ServiceError> {
|
||||||
|
if let Some(proc) = self.engine_child.lock().await.as_mut() {
|
||||||
|
if proc.wait().await.is_err() {
|
||||||
|
return Err(ServiceError::InternalServerError);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok("Success".to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn status(&self) -> Result<String, ServiceError> {
|
||||||
|
if self.is_running.load(Ordering::SeqCst) {
|
||||||
|
Ok("active".to_string())
|
||||||
|
} else {
|
||||||
|
Ok("not running".to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for ProcessControl {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq)]
|
||||||
|
#[serde(rename_all = "snake_case")]
|
||||||
|
pub enum ServiceCmd {
|
||||||
|
Enable,
|
||||||
|
Disable,
|
||||||
|
Start,
|
||||||
|
Stop,
|
||||||
|
Restart,
|
||||||
|
Status,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromStr for ServiceCmd {
|
||||||
|
type Err = String;
|
||||||
|
|
||||||
|
fn from_str(input: &str) -> Result<Self, Self::Err> {
|
||||||
|
match input.to_lowercase().as_str() {
|
||||||
|
"enable" => Ok(Self::Enable),
|
||||||
|
"disable" => Ok(Self::Disable),
|
||||||
|
"start" => Ok(Self::Start),
|
||||||
|
"stop" => Ok(Self::Stop),
|
||||||
|
"restart" => Ok(Self::Restart),
|
||||||
|
"status" => Ok(Self::Status),
|
||||||
|
_ => Err(format!("Command '{input}' not found!")),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for ServiceCmd {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
match *self {
|
||||||
|
Self::Enable => write!(f, "enable"),
|
||||||
|
Self::Disable => write!(f, "disable"),
|
||||||
|
Self::Start => write!(f, "start"),
|
||||||
|
Self::Stop => write!(f, "stop"),
|
||||||
|
Self::Restart => write!(f, "restart"),
|
||||||
|
Self::Status => write!(f, "status"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize, Serialize, Clone)]
|
||||||
|
pub struct Process {
|
||||||
|
pub command: ServiceCmd,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Thin wrapper around `systemctl`, bound to one channel's service unit.
struct SystemD {
    // Name of the systemd unit, taken from the channel row in the database.
    service: String,
    // Base command line; action and unit name are appended before spawning.
    cmd: Vec<String>,
}
|
||||||
|
|
||||||
|
impl SystemD {
|
||||||
|
async fn new(conn: &Pool<Sqlite>, id: i32) -> Result<Self, ServiceError> {
|
||||||
|
let channel = select_channel(conn, &id).await?;
|
||||||
|
|
||||||
|
Ok(Self {
|
||||||
|
service: channel.service,
|
||||||
|
cmd: vec_strings!["/usr/bin/systemctl"],
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn enable(mut self) -> Result<String, ServiceError> {
|
||||||
|
self.cmd
|
||||||
|
.append(&mut vec!["enable".to_string(), self.service]);
|
||||||
|
|
||||||
|
Command::new("sudo").args(self.cmd).spawn()?;
|
||||||
|
|
||||||
|
Ok("Success".to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn disable(mut self) -> Result<String, ServiceError> {
|
||||||
|
self.cmd
|
||||||
|
.append(&mut vec!["disable".to_string(), self.service]);
|
||||||
|
|
||||||
|
Command::new("sudo").args(self.cmd).spawn()?;
|
||||||
|
|
||||||
|
Ok("Success".to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn start(mut self) -> Result<String, ServiceError> {
|
||||||
|
self.cmd
|
||||||
|
.append(&mut vec!["start".to_string(), self.service]);
|
||||||
|
|
||||||
|
Command::new("sudo").args(self.cmd).spawn()?;
|
||||||
|
|
||||||
|
Ok("Success".to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn stop(mut self) -> Result<String, ServiceError> {
|
||||||
|
self.cmd.append(&mut vec!["stop".to_string(), self.service]);
|
||||||
|
|
||||||
|
Command::new("sudo").args(self.cmd).spawn()?;
|
||||||
|
|
||||||
|
Ok("Success".to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn restart(mut self) -> Result<String, ServiceError> {
|
||||||
|
self.cmd
|
||||||
|
.append(&mut vec!["restart".to_string(), self.service]);
|
||||||
|
|
||||||
|
Command::new("sudo").args(self.cmd).spawn()?;
|
||||||
|
|
||||||
|
Ok("Success".to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn status(mut self) -> Result<String, ServiceError> {
|
||||||
|
self.cmd
|
||||||
|
.append(&mut vec!["is-active".to_string(), self.service]);
|
||||||
|
|
||||||
|
let output = Command::new("sudo").args(self.cmd).output().await?;
|
||||||
|
|
||||||
|
Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn post_request<T>(config: &PlayoutConfig, obj: T) -> Result<Response, ServiceError>
|
||||||
|
where
|
||||||
|
T: Serialize,
|
||||||
|
{
|
||||||
|
let url = format!("http://{}", config.rpc_server.address);
|
||||||
|
let client = Client::new();
|
||||||
|
|
||||||
|
match client
|
||||||
|
.post(&url)
|
||||||
|
.header(AUTHORIZATION, &config.rpc_server.authorization)
|
||||||
|
.json(&obj)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(result) => Ok(result),
|
||||||
|
Err(e) => Err(ServiceError::ServiceUnavailable(e.to_string())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn send_message(
|
||||||
|
config: &PlayoutConfig,
|
||||||
|
message: HashMap<String, String>,
|
||||||
|
) -> Result<Response, ServiceError> {
|
||||||
|
let json_obj = TextParams {
|
||||||
|
control: "text".into(),
|
||||||
|
message,
|
||||||
|
};
|
||||||
|
|
||||||
|
post_request(config, json_obj).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn control_state(
|
||||||
|
config: &PlayoutConfig,
|
||||||
|
command: &str,
|
||||||
|
) -> Result<Response, ServiceError> {
|
||||||
|
let json_obj = ControlParams {
|
||||||
|
control: command.to_owned(),
|
||||||
|
};
|
||||||
|
|
||||||
|
post_request(config, json_obj).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn media_info(config: &PlayoutConfig, command: String) -> Result<Response, ServiceError> {
|
||||||
|
let json_obj = MediaParams { media: command };
|
||||||
|
|
||||||
|
post_request(config, json_obj).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn control_service(
|
||||||
|
conn: &Pool<Sqlite>,
|
||||||
|
config: &PlayoutConfig,
|
||||||
|
id: i32,
|
||||||
|
command: &ServiceCmd,
|
||||||
|
engine: Option<web::Data<ProcessControl>>,
|
||||||
|
) -> Result<String, ServiceError> {
|
||||||
|
if let Some(en) = engine {
|
||||||
|
if en.piggyback.load(Ordering::SeqCst) {
|
||||||
|
match command {
|
||||||
|
ServiceCmd::Start => en.start().await,
|
||||||
|
ServiceCmd::Stop => {
|
||||||
|
if control_state(config, "stop_all").await.is_ok() {
|
||||||
|
en.stop().await
|
||||||
|
} else {
|
||||||
|
Err(ServiceError::NoContent("Nothing to stop".to_string()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ServiceCmd::Restart => {
|
||||||
|
if control_state(config, "stop_all").await.is_ok() {
|
||||||
|
en.restart().await
|
||||||
|
} else {
|
||||||
|
Err(ServiceError::NoContent("Nothing to restart".to_string()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ServiceCmd::Status => en.status(),
|
||||||
|
_ => Err(ServiceError::Conflict(
|
||||||
|
"Engine runs in piggyback mode, in this mode this command is not allowed."
|
||||||
|
.to_string(),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
execute_systemd(conn, id, command).await
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
execute_systemd(conn, id, command).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn execute_systemd(
|
||||||
|
conn: &Pool<Sqlite>,
|
||||||
|
id: i32,
|
||||||
|
command: &ServiceCmd,
|
||||||
|
) -> Result<String, ServiceError> {
|
||||||
|
let system_d = SystemD::new(conn, id).await?;
|
||||||
|
match command {
|
||||||
|
ServiceCmd::Enable => system_d.enable(),
|
||||||
|
ServiceCmd::Disable => system_d.disable(),
|
||||||
|
ServiceCmd::Start => system_d.start(),
|
||||||
|
ServiceCmd::Stop => system_d.stop(),
|
||||||
|
ServiceCmd::Restart => system_d.restart(),
|
||||||
|
ServiceCmd::Status => system_d.status().await,
|
||||||
|
}
|
||||||
|
}
|
@ -1,30 +1,27 @@
|
|||||||
use std::io;
|
|
||||||
|
|
||||||
use actix_web::{error::ResponseError, Error, HttpResponse};
|
use actix_web::{error::ResponseError, Error, HttpResponse};
|
||||||
use derive_more::Display;
|
use derive_more::Display;
|
||||||
use ffprobe::FfProbeError;
|
|
||||||
|
|
||||||
#[derive(Debug, Display)]
|
#[derive(Debug, Display)]
|
||||||
pub enum ServiceError {
|
pub enum ServiceError {
|
||||||
#[display("Internal Server Error")]
|
#[display(fmt = "Internal Server Error")]
|
||||||
InternalServerError,
|
InternalServerError,
|
||||||
|
|
||||||
#[display("BadRequest: {_0}")]
|
#[display(fmt = "BadRequest: {_0}")]
|
||||||
BadRequest(String),
|
BadRequest(String),
|
||||||
|
|
||||||
#[display("Conflict: {_0}")]
|
#[display(fmt = "Conflict: {_0}")]
|
||||||
Conflict(String),
|
Conflict(String),
|
||||||
|
|
||||||
#[display("Forbidden: {_0}")]
|
#[display(fmt = "Forbidden: {_0}")]
|
||||||
Forbidden(String),
|
Forbidden(String),
|
||||||
|
|
||||||
#[display("Unauthorized: {_0}")]
|
#[display(fmt = "Unauthorized: {_0}")]
|
||||||
Unauthorized(String),
|
Unauthorized(String),
|
||||||
|
|
||||||
#[display("NoContent: {_0}")]
|
#[display(fmt = "NoContent: {_0}")]
|
||||||
NoContent(String),
|
NoContent(String),
|
||||||
|
|
||||||
#[display("ServiceUnavailable: {_0}")]
|
#[display(fmt = "ServiceUnavailable: {_0}")]
|
||||||
ServiceUnavailable(String),
|
ServiceUnavailable(String),
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -77,12 +74,6 @@ impl From<std::num::ParseIntError> for ServiceError {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<jsonwebtoken::errors::Error> for ServiceError {
|
|
||||||
fn from(err: jsonwebtoken::errors::Error) -> ServiceError {
|
|
||||||
ServiceError::Unauthorized(err.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<actix_web::error::BlockingError> for ServiceError {
|
impl From<actix_web::error::BlockingError> for ServiceError {
|
||||||
fn from(err: actix_web::error::BlockingError) -> ServiceError {
|
fn from(err: actix_web::error::BlockingError) -> ServiceError {
|
||||||
ServiceError::BadRequest(err.to_string())
|
ServiceError::BadRequest(err.to_string())
|
||||||
@ -107,96 +98,8 @@ impl From<toml_edit::ser::Error> for ServiceError {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<toml_edit::TomlError> for ServiceError {
|
|
||||||
fn from(err: toml_edit::TomlError) -> ServiceError {
|
|
||||||
ServiceError::BadRequest(err.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<uuid::Error> for ServiceError {
|
impl From<uuid::Error> for ServiceError {
|
||||||
fn from(err: uuid::Error) -> ServiceError {
|
fn from(err: uuid::Error) -> ServiceError {
|
||||||
ServiceError::BadRequest(err.to_string())
|
ServiceError::BadRequest(err.to_string())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<serde_json::Error> for ServiceError {
|
|
||||||
fn from(err: serde_json::Error) -> ServiceError {
|
|
||||||
ServiceError::BadRequest(err.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&str> for ServiceError {
|
|
||||||
fn from(err: &str) -> ServiceError {
|
|
||||||
ServiceError::BadRequest(err.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Display)]
|
|
||||||
pub enum ProcessError {
|
|
||||||
#[display("Failed to spawn ffmpeg/ffprobe. {}", _0)]
|
|
||||||
CommandSpawn(io::Error),
|
|
||||||
#[display("{}", _0)]
|
|
||||||
Custom(String),
|
|
||||||
#[display("IO error: {}", _0)]
|
|
||||||
IO(io::Error),
|
|
||||||
#[display("{}", _0)]
|
|
||||||
Ffprobe(FfProbeError),
|
|
||||||
#[display("Regex compile error {}", _0)]
|
|
||||||
Regex(String),
|
|
||||||
#[display("Thread error {}", _0)]
|
|
||||||
Thread(String),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<std::io::Error> for ProcessError {
|
|
||||||
fn from(err: std::io::Error) -> ProcessError {
|
|
||||||
ProcessError::IO(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<FfProbeError> for ProcessError {
|
|
||||||
fn from(err: FfProbeError) -> Self {
|
|
||||||
Self::Ffprobe(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<lettre::address::AddressError> for ProcessError {
|
|
||||||
fn from(err: lettre::address::AddressError) -> ProcessError {
|
|
||||||
ProcessError::Custom(err.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<lettre::transport::smtp::Error> for ProcessError {
|
|
||||||
fn from(err: lettre::transport::smtp::Error) -> ProcessError {
|
|
||||||
ProcessError::Custom(err.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<lettre::error::Error> for ProcessError {
|
|
||||||
fn from(err: lettre::error::Error) -> ProcessError {
|
|
||||||
ProcessError::Custom(err.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> From<std::sync::PoisonError<T>> for ProcessError {
|
|
||||||
fn from(err: std::sync::PoisonError<T>) -> ProcessError {
|
|
||||||
ProcessError::Custom(err.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<regex::Error> for ProcessError {
|
|
||||||
fn from(err: regex::Error) -> Self {
|
|
||||||
Self::Regex(err.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<serde_json::Error> for ProcessError {
|
|
||||||
fn from(err: serde_json::Error) -> Self {
|
|
||||||
Self::Custom(err.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<Box<dyn std::any::Any + std::marker::Send>> for ProcessError {
|
|
||||||
fn from(err: Box<dyn std::any::Any + std::marker::Send>) -> Self {
|
|
||||||
Self::Thread(format!("{err:?}"))
|
|
||||||
}
|
|
||||||
}
|
|
@ -6,17 +6,18 @@ use std::{
|
|||||||
use actix_multipart::Multipart;
|
use actix_multipart::Multipart;
|
||||||
use actix_web::{web, HttpResponse};
|
use actix_web::{web, HttpResponse};
|
||||||
use futures_util::TryStreamExt as _;
|
use futures_util::TryStreamExt as _;
|
||||||
|
use lazy_static::lazy_static;
|
||||||
use lexical_sort::{natural_lexical_cmp, PathSort};
|
use lexical_sort::{natural_lexical_cmp, PathSort};
|
||||||
use rand::{distributions::Alphanumeric, Rng};
|
use rand::{distributions::Alphanumeric, Rng};
|
||||||
use relative_path::RelativePath;
|
use relative_path::RelativePath;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
use sqlx::{Pool, Sqlite};
|
||||||
use tokio::fs;
|
use tokio::fs;
|
||||||
|
|
||||||
use log::*;
|
use simplelog::*;
|
||||||
|
|
||||||
use crate::db::models::Channel;
|
use crate::utils::{errors::ServiceError, playout_config};
|
||||||
use crate::player::utils::{file_extension, MediaProbe};
|
use ffplayout_lib::utils::{file_extension, MediaProbe};
|
||||||
use crate::utils::{config::PlayoutConfig, errors::ServiceError};
|
|
||||||
|
|
||||||
#[derive(Debug, Deserialize, Serialize, Clone)]
|
#[derive(Debug, Deserialize, Serialize, Clone)]
|
||||||
pub struct PathObject {
|
pub struct PathObject {
|
||||||
@ -27,8 +28,6 @@ pub struct PathObject {
|
|||||||
files: Option<Vec<VideoFile>>,
|
files: Option<Vec<VideoFile>>,
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub folders_only: bool,
|
pub folders_only: bool,
|
||||||
#[serde(default)]
|
|
||||||
pub recursive: bool,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PathObject {
|
impl PathObject {
|
||||||
@ -40,7 +39,6 @@ impl PathObject {
|
|||||||
folders: Some(vec![]),
|
folders: Some(vec![]),
|
||||||
files: Some(vec![]),
|
files: Some(vec![]),
|
||||||
folders_only: false,
|
folders_only: false,
|
||||||
recursive: false,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -57,6 +55,23 @@ pub struct VideoFile {
|
|||||||
duration: f64,
|
duration: f64,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
pub static ref HOME_DIR: String = home::home_dir()
|
||||||
|
.unwrap_or("/home/h1wl3n2og".into()) // any random not existing folder
|
||||||
|
.as_os_str()
|
||||||
|
.to_string_lossy()
|
||||||
|
.to_string();
|
||||||
|
}
|
||||||
|
|
||||||
|
const FOLDER_WHITELIST: &[&str; 6] = &[
|
||||||
|
"/media",
|
||||||
|
"/mnt",
|
||||||
|
"/playlists",
|
||||||
|
"/tv-media",
|
||||||
|
"/usr/share/ffplayout",
|
||||||
|
"/var/lib/ffplayout",
|
||||||
|
];
|
||||||
|
|
||||||
/// Normalize absolut path
|
/// Normalize absolut path
|
||||||
///
|
///
|
||||||
/// This function takes care, that it is not possible to break out from root_path.
|
/// This function takes care, that it is not possible to break out from root_path.
|
||||||
@ -68,15 +83,15 @@ pub fn norm_abs_path(
|
|||||||
.normalize()
|
.normalize()
|
||||||
.to_string()
|
.to_string()
|
||||||
.replace("../", "");
|
.replace("../", "");
|
||||||
|
let mut source_relative = RelativePath::new(input_path)
|
||||||
|
.normalize()
|
||||||
|
.to_string()
|
||||||
|
.replace("../", "");
|
||||||
let path_suffix = root_path
|
let path_suffix = root_path
|
||||||
.file_name()
|
.file_name()
|
||||||
.unwrap_or_default()
|
.unwrap_or_default()
|
||||||
.to_string_lossy()
|
.to_string_lossy()
|
||||||
.to_string();
|
.to_string();
|
||||||
let mut source_relative = RelativePath::new(input_path)
|
|
||||||
.normalize()
|
|
||||||
.to_string()
|
|
||||||
.replace("../", "");
|
|
||||||
|
|
||||||
if input_path.starts_with(&*root_path.to_string_lossy())
|
if input_path.starts_with(&*root_path.to_string_lossy())
|
||||||
|| source_relative.starts_with(&path_relative)
|
|| source_relative.starts_with(&path_relative)
|
||||||
@ -96,6 +111,14 @@ pub fn norm_abs_path(
|
|||||||
|
|
||||||
let path = &root_path.join(&source_relative);
|
let path = &root_path.join(&source_relative);
|
||||||
|
|
||||||
|
if !FOLDER_WHITELIST.iter().any(|f| path.starts_with(f))
|
||||||
|
&& !path.starts_with(HOME_DIR.to_string())
|
||||||
|
{
|
||||||
|
return Err(ServiceError::Forbidden(
|
||||||
|
"Access forbidden: Folder cannot be opened.".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
Ok((path.to_path_buf(), path_suffix, source_relative))
|
Ok((path.to_path_buf(), path_suffix, source_relative))
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -105,25 +128,26 @@ pub fn norm_abs_path(
|
|||||||
/// Input should be a relative path segment, but when it is a absolut path, the norm_abs_path function
|
/// Input should be a relative path segment, but when it is a absolut path, the norm_abs_path function
|
||||||
/// will take care, that user can not break out from given storage path in config.
|
/// will take care, that user can not break out from given storage path in config.
|
||||||
pub async fn browser(
|
pub async fn browser(
|
||||||
config: &PlayoutConfig,
|
conn: &Pool<Sqlite>,
|
||||||
channel: &Channel,
|
id: i32,
|
||||||
path_obj: &PathObject,
|
path_obj: &PathObject,
|
||||||
) -> Result<PathObject, ServiceError> {
|
) -> Result<PathObject, ServiceError> {
|
||||||
|
let (config, channel) = playout_config(conn, &id).await?;
|
||||||
let mut channel_extensions = channel
|
let mut channel_extensions = channel
|
||||||
.extra_extensions
|
.extra_extensions
|
||||||
.split(',')
|
.split(',')
|
||||||
.map(|e| e.to_string())
|
.map(|e| e.to_string())
|
||||||
.collect::<Vec<String>>();
|
.collect::<Vec<String>>();
|
||||||
let mut parent_folders = vec![];
|
let mut parent_folders = vec![];
|
||||||
let mut extensions = config.storage.extensions.clone();
|
let mut extensions = config.storage.extensions;
|
||||||
extensions.append(&mut channel_extensions);
|
extensions.append(&mut channel_extensions);
|
||||||
|
|
||||||
let (path, parent, path_component) = norm_abs_path(&config.channel.storage, &path_obj.source)?;
|
let (path, parent, path_component) = norm_abs_path(&config.storage.path, &path_obj.source)?;
|
||||||
|
|
||||||
let parent_path = if !path_component.is_empty() {
|
let parent_path = if !path_component.is_empty() {
|
||||||
path.parent().unwrap()
|
path.parent().unwrap()
|
||||||
} else {
|
} else {
|
||||||
&config.channel.storage
|
&config.storage.path
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut obj = PathObject::new(path_component, Some(parent));
|
let mut obj = PathObject::new(path_component, Some(parent));
|
||||||
@ -211,10 +235,12 @@ pub async fn browser(
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub async fn create_directory(
|
pub async fn create_directory(
|
||||||
config: &PlayoutConfig,
|
conn: &Pool<Sqlite>,
|
||||||
|
id: i32,
|
||||||
path_obj: &PathObject,
|
path_obj: &PathObject,
|
||||||
) -> Result<HttpResponse, ServiceError> {
|
) -> Result<HttpResponse, ServiceError> {
|
||||||
let (path, _, _) = norm_abs_path(&config.channel.storage, &path_obj.source)?;
|
let (config, _) = playout_config(conn, &id).await?;
|
||||||
|
let (path, _, _) = norm_abs_path(&config.storage.path, &path_obj.source)?;
|
||||||
|
|
||||||
if let Err(e) = fs::create_dir_all(&path).await {
|
if let Err(e) = fs::create_dir_all(&path).await {
|
||||||
return Err(ServiceError::BadRequest(e.to_string()));
|
return Err(ServiceError::BadRequest(e.to_string()));
|
||||||
@ -280,11 +306,13 @@ async fn rename(source: &PathBuf, target: &PathBuf) -> Result<MoveObject, Servic
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub async fn rename_file(
|
pub async fn rename_file(
|
||||||
config: &PlayoutConfig,
|
conn: &Pool<Sqlite>,
|
||||||
|
id: i32,
|
||||||
move_object: &MoveObject,
|
move_object: &MoveObject,
|
||||||
) -> Result<MoveObject, ServiceError> {
|
) -> Result<MoveObject, ServiceError> {
|
||||||
let (source_path, _, _) = norm_abs_path(&config.channel.storage, &move_object.source)?;
|
let (config, _) = playout_config(conn, &id).await?;
|
||||||
let (mut target_path, _, _) = norm_abs_path(&config.channel.storage, &move_object.target)?;
|
let (source_path, _, _) = norm_abs_path(&config.storage.path, &move_object.source)?;
|
||||||
|
let (mut target_path, _, _) = norm_abs_path(&config.storage.path, &move_object.target)?;
|
||||||
|
|
||||||
if !source_path.exists() {
|
if !source_path.exists() {
|
||||||
return Err(ServiceError::BadRequest("Source file not exist!".into()));
|
return Err(ServiceError::BadRequest("Source file not exist!".into()));
|
||||||
@ -313,38 +341,27 @@ pub async fn rename_file(
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub async fn remove_file_or_folder(
|
pub async fn remove_file_or_folder(
|
||||||
config: &PlayoutConfig,
|
conn: &Pool<Sqlite>,
|
||||||
|
id: i32,
|
||||||
source_path: &str,
|
source_path: &str,
|
||||||
recursive: bool,
|
|
||||||
) -> Result<(), ServiceError> {
|
) -> Result<(), ServiceError> {
|
||||||
let (source, _, _) = norm_abs_path(&config.channel.storage, source_path)?;
|
let (config, _) = playout_config(conn, &id).await?;
|
||||||
|
let (source, _, _) = norm_abs_path(&config.storage.path, source_path)?;
|
||||||
|
|
||||||
if !source.exists() {
|
if !source.exists() {
|
||||||
return Err(ServiceError::BadRequest("Source does not exists!".into()));
|
return Err(ServiceError::BadRequest("Source does not exists!".into()));
|
||||||
}
|
}
|
||||||
|
|
||||||
if source.is_dir() {
|
if source.is_dir() {
|
||||||
if recursive {
|
match fs::remove_dir(source).await {
|
||||||
match fs::remove_dir_all(source).await {
|
Ok(_) => return Ok(()),
|
||||||
Ok(_) => return Ok(()),
|
Err(e) => {
|
||||||
Err(e) => {
|
error!("{e}");
|
||||||
error!("{e}");
|
return Err(ServiceError::BadRequest(
|
||||||
return Err(ServiceError::BadRequest(
|
"Delete folder failed! (Folder must be empty)".into(),
|
||||||
"Delete folder and its content failed!".into(),
|
));
|
||||||
));
|
}
|
||||||
}
|
};
|
||||||
};
|
|
||||||
} else {
|
|
||||||
match fs::remove_dir(source).await {
|
|
||||||
Ok(_) => return Ok(()),
|
|
||||||
Err(e) => {
|
|
||||||
error!("{e}");
|
|
||||||
return Err(ServiceError::BadRequest(
|
|
||||||
"Delete folder failed! (Folder must be empty)".into(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if source.is_file() {
|
if source.is_file() {
|
||||||
@ -360,8 +377,9 @@ pub async fn remove_file_or_folder(
|
|||||||
Err(ServiceError::InternalServerError)
|
Err(ServiceError::InternalServerError)
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn valid_path(config: &PlayoutConfig, path: &str) -> Result<PathBuf, ServiceError> {
|
async fn valid_path(conn: &Pool<Sqlite>, id: i32, path: &str) -> Result<PathBuf, ServiceError> {
|
||||||
let (test_path, _, _) = norm_abs_path(&config.channel.storage, path)?;
|
let (config, _) = playout_config(conn, &id).await?;
|
||||||
|
let (test_path, _, _) = norm_abs_path(&config.storage.path, path)?;
|
||||||
|
|
||||||
if !test_path.is_dir() {
|
if !test_path.is_dir() {
|
||||||
return Err(ServiceError::BadRequest("Target folder not exists!".into()));
|
return Err(ServiceError::BadRequest("Target folder not exists!".into()));
|
||||||
@ -371,14 +389,15 @@ async fn valid_path(config: &PlayoutConfig, path: &str) -> Result<PathBuf, Servi
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub async fn upload(
|
pub async fn upload(
|
||||||
config: &PlayoutConfig,
|
conn: &Pool<Sqlite>,
|
||||||
|
id: i32,
|
||||||
_size: u64,
|
_size: u64,
|
||||||
mut payload: Multipart,
|
mut payload: Multipart,
|
||||||
path: &Path,
|
path: &Path,
|
||||||
abs_path: bool,
|
abs_path: bool,
|
||||||
) -> Result<HttpResponse, ServiceError> {
|
) -> Result<HttpResponse, ServiceError> {
|
||||||
while let Some(mut field) = payload.try_next().await? {
|
while let Some(mut field) = payload.try_next().await? {
|
||||||
let content_disposition = field.content_disposition().ok_or("No content")?;
|
let content_disposition = field.content_disposition();
|
||||||
debug!("{content_disposition}");
|
debug!("{content_disposition}");
|
||||||
let rand_string: String = rand::thread_rng()
|
let rand_string: String = rand::thread_rng()
|
||||||
.sample_iter(&Alphanumeric)
|
.sample_iter(&Alphanumeric)
|
||||||
@ -392,7 +411,7 @@ pub async fn upload(
|
|||||||
let filepath = if abs_path {
|
let filepath = if abs_path {
|
||||||
path.to_path_buf()
|
path.to_path_buf()
|
||||||
} else {
|
} else {
|
||||||
valid_path(config, &path.to_string_lossy())
|
valid_path(conn, id, &path.to_string_lossy())
|
||||||
.await?
|
.await?
|
||||||
.join(filename)
|
.join(filename)
|
||||||
};
|
};
|
390
ffplayout-api/src/utils/mod.rs
Normal file
@ -0,0 +1,390 @@
|
|||||||
|
use std::{
|
||||||
|
env,
|
||||||
|
error::Error,
|
||||||
|
fmt,
|
||||||
|
fs::{self, metadata, File},
|
||||||
|
io::{stdin, stdout, Read, Write},
|
||||||
|
path::{Path, PathBuf},
|
||||||
|
str::FromStr,
|
||||||
|
};
|
||||||
|
|
||||||
|
use chrono::{format::ParseErrorKind, prelude::*};
|
||||||
|
use faccess::PathExt;
|
||||||
|
use once_cell::sync::OnceCell;
|
||||||
|
use path_clean::PathClean;
|
||||||
|
use rpassword::read_password;
|
||||||
|
use serde::{de, Deserialize, Deserializer, Serialize};
|
||||||
|
use simplelog::*;
|
||||||
|
use sqlx::{sqlite::SqliteRow, FromRow, Pool, Row, Sqlite};
|
||||||
|
|
||||||
|
use crate::ARGS;
|
||||||
|
|
||||||
|
pub mod args_parse;
|
||||||
|
pub mod channels;
|
||||||
|
pub mod control;
|
||||||
|
pub mod errors;
|
||||||
|
pub mod files;
|
||||||
|
pub mod playlist;
|
||||||
|
pub mod system;
|
||||||
|
|
||||||
|
use crate::db::{
|
||||||
|
db_pool,
|
||||||
|
handles::{db_init, insert_user, select_channel, select_global},
|
||||||
|
models::{Channel, User},
|
||||||
|
};
|
||||||
|
use crate::utils::errors::ServiceError;
|
||||||
|
use ffplayout_lib::utils::{time_to_sec, PlayoutConfig};
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
|
||||||
|
pub enum Role {
|
||||||
|
Admin,
|
||||||
|
User,
|
||||||
|
Guest,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Role {
|
||||||
|
pub fn set_role(role: &str) -> Self {
|
||||||
|
match role {
|
||||||
|
"admin" => Role::Admin,
|
||||||
|
"user" => Role::User,
|
||||||
|
_ => Role::Guest,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromStr for Role {
|
||||||
|
type Err = String;
|
||||||
|
|
||||||
|
fn from_str(input: &str) -> Result<Self, Self::Err> {
|
||||||
|
match input {
|
||||||
|
"admin" => Ok(Self::Admin),
|
||||||
|
"user" => Ok(Self::User),
|
||||||
|
_ => Ok(Self::Guest),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for Role {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
match *self {
|
||||||
|
Self::Admin => write!(f, "admin"),
|
||||||
|
Self::User => write!(f, "user"),
|
||||||
|
Self::Guest => write!(f, "guest"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'r> sqlx::decode::Decode<'r, ::sqlx::Sqlite> for Role
|
||||||
|
where
|
||||||
|
&'r str: sqlx::decode::Decode<'r, sqlx::Sqlite>,
|
||||||
|
{
|
||||||
|
fn decode(
|
||||||
|
value: <sqlx::Sqlite as sqlx::database::HasValueRef<'r>>::ValueRef,
|
||||||
|
) -> Result<Role, Box<dyn Error + 'static + Send + Sync>> {
|
||||||
|
let value = <&str as sqlx::decode::Decode<sqlx::Sqlite>>::decode(value)?;
|
||||||
|
|
||||||
|
Ok(value.parse()?)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromRow<'_, SqliteRow> for Role {
|
||||||
|
fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
|
||||||
|
match row.get("name") {
|
||||||
|
"admin" => Ok(Self::Admin),
|
||||||
|
"user" => Ok(Self::User),
|
||||||
|
_ => Ok(Self::Guest),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, sqlx::FromRow)]
|
||||||
|
pub struct GlobalSettings {
|
||||||
|
pub secret: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl GlobalSettings {
|
||||||
|
async fn new(conn: &Pool<Sqlite>) -> Self {
|
||||||
|
let global_settings = select_global(conn);
|
||||||
|
|
||||||
|
match global_settings.await {
|
||||||
|
Ok(g) => g,
|
||||||
|
Err(_) => GlobalSettings {
|
||||||
|
secret: String::new(),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn global() -> &'static GlobalSettings {
|
||||||
|
INSTANCE.get().expect("Config is not initialized")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static INSTANCE: OnceCell<GlobalSettings> = OnceCell::new();
|
||||||
|
|
||||||
|
pub async fn init_config(conn: &Pool<Sqlite>) {
|
||||||
|
let config = GlobalSettings::new(conn).await;
|
||||||
|
INSTANCE.set(config).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn db_path() -> Result<&'static str, Box<dyn std::error::Error>> {
|
||||||
|
if let Some(path) = ARGS.db.clone() {
|
||||||
|
let absolute_path = if path.is_absolute() {
|
||||||
|
path
|
||||||
|
} else {
|
||||||
|
env::current_dir()?.join(path)
|
||||||
|
}
|
||||||
|
.clean();
|
||||||
|
|
||||||
|
if let Some(abs_path) = absolute_path.parent() {
|
||||||
|
if abs_path.writable() {
|
||||||
|
return Ok(Box::leak(
|
||||||
|
absolute_path.to_string_lossy().to_string().into_boxed_str(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
error!("Given database path is not writable!");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let sys_path = Path::new("/usr/share/ffplayout/db");
|
||||||
|
let mut db_path = "./ffplayout.db";
|
||||||
|
|
||||||
|
if sys_path.is_dir() && !sys_path.writable() {
|
||||||
|
error!("Path {} is not writable!", sys_path.display());
|
||||||
|
}
|
||||||
|
|
||||||
|
if sys_path.is_dir() && sys_path.writable() {
|
||||||
|
db_path = "/usr/share/ffplayout/db/ffplayout.db";
|
||||||
|
} else if Path::new("./assets").is_dir() {
|
||||||
|
db_path = "./assets/ffplayout.db";
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(db_path)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Locate the directory holding the frontend's static files.
///
/// Order: local dev build (debug builds only) → system-wide install →
/// `./public/` fallback.
pub fn public_path() -> PathBuf {
    let dev_path = PathBuf::from("./ffplayout-frontend/.output/public/");

    if cfg!(debug_assertions) && dev_path.is_dir() {
        return dev_path;
    }

    let system_path = PathBuf::from("/usr/share/ffplayout/public/");

    if system_path.is_dir() {
        return system_path;
    }

    PathBuf::from("./public/")
}
|
||||||
|
|
||||||
|
/// Handle one-shot CLI modes before the server starts.
///
/// Returns `Ok(())` when the server should continue booting, or
/// `Err(exit_code)` when the process handled a one-shot action (init,
/// user creation) or the arguments were invalid and it should exit.
pub async fn run_args() -> Result<(), i32> {
    let mut args = ARGS.clone();

    // Neither init, nor listen address, nor interactive/user-creation flags:
    // nothing sensible to do.
    if !args.init && args.listen.is_none() && !args.ask && args.username.is_none() {
        error!("Wrong number of arguments! Run ffpapi --help for more information.");

        return Err(0);
    }

    // `--init`: create the database schema, then exit.
    if args.init {
        if let Err(e) = db_init(args.domain).await {
            panic!("{e}");
        };

        return Err(0);
    }

    // `--ask`: interactively collect username, password and mail on the TTY.
    if args.ask {
        let mut user = String::new();
        print!("Username: ");
        stdout().flush().unwrap();

        stdin()
            .read_line(&mut user)
            .expect("Did not enter a correct name?");
        // Strip the trailing newline (and carriage return on Windows input).
        if let Some('\n') = user.chars().next_back() {
            user.pop();
        }
        if let Some('\r') = user.chars().next_back() {
            user.pop();
        }

        args.username = Some(user);

        print!("Password: ");
        stdout().flush().unwrap();
        // read_password hides the typed characters.
        let password = read_password();

        args.password = password.ok();

        let mut mail = String::new();
        print!("Mail: ");
        stdout().flush().unwrap();

        stdin()
            .read_line(&mut mail)
            .expect("Did not enter a correct name?");
        if let Some('\n') = mail.chars().next_back() {
            mail.pop();
        }
        if let Some('\r') = mail.chars().next_back() {
            mail.pop();
        }

        args.mail = Some(mail);
    }

    // With a username (from flags or the prompts above), create an admin
    // user and exit.
    if let Some(username) = args.username {
        if args.mail.is_none() || args.password.is_none() {
            error!("Mail/password missing!");
            return Err(1);
        }

        let user = User {
            id: 0,
            mail: Some(args.mail.unwrap()),
            username: username.clone(),
            password: args.password.unwrap(),
            role_id: Some(1),    // admin role — TODO confirm against roles table
            channel_id: Some(1), // first/default channel
            token: None,
        };

        match db_pool().await {
            Ok(conn) => {
                if let Err(e) = insert_user(&conn, user).await {
                    error!("{e}");
                    return Err(1);
                };
            }

            Err(e) => {
                error!("{e}");
                return Err(1);
            }
        };

        info!("Create admin user \"{username}\" done...");

        return Err(0);
    }

    Ok(())
}
|
||||||
|
|
||||||
|
pub fn read_playout_config(path: &str) -> Result<PlayoutConfig, Box<dyn Error>> {
|
||||||
|
let mut file = File::open(path)?;
|
||||||
|
let mut contents = String::new();
|
||||||
|
file.read_to_string(&mut contents)?;
|
||||||
|
|
||||||
|
let mut config: PlayoutConfig = toml_edit::de::from_str(&contents)?;
|
||||||
|
|
||||||
|
config.playlist.start_sec = Some(time_to_sec(&config.playlist.day_start));
|
||||||
|
config.playlist.length_sec = Some(time_to_sec(&config.playlist.length));
|
||||||
|
|
||||||
|
Ok(config)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn playout_config(
|
||||||
|
conn: &Pool<Sqlite>,
|
||||||
|
channel_id: &i32,
|
||||||
|
) -> Result<(PlayoutConfig, Channel), ServiceError> {
|
||||||
|
if let Ok(channel) = select_channel(conn, channel_id).await {
|
||||||
|
match read_playout_config(&channel.config_path.clone()) {
|
||||||
|
Ok(config) => return Ok((config, channel)),
|
||||||
|
Err(e) => error!("{e}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(ServiceError::BadRequest(
|
||||||
|
"Error in getting config!".to_string(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn read_log_file(
|
||||||
|
conn: &Pool<Sqlite>,
|
||||||
|
channel_id: &i32,
|
||||||
|
date: &str,
|
||||||
|
) -> Result<String, ServiceError> {
|
||||||
|
if let Ok(channel) = select_channel(conn, channel_id).await {
|
||||||
|
let mut date_str = "".to_string();
|
||||||
|
|
||||||
|
if !date.is_empty() {
|
||||||
|
date_str.push('.');
|
||||||
|
date_str.push_str(date);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Ok(config) = read_playout_config(&channel.config_path) {
|
||||||
|
let mut log_path = Path::new(&config.logging.path)
|
||||||
|
.join("ffplayout.log")
|
||||||
|
.display()
|
||||||
|
.to_string();
|
||||||
|
log_path.push_str(&date_str);
|
||||||
|
|
||||||
|
let file_size = metadata(&log_path)?.len() as f64;
|
||||||
|
|
||||||
|
let file_content = if file_size > 5000000.0 {
|
||||||
|
error!("Log file to big: {}", sizeof_fmt(file_size));
|
||||||
|
format!("The log file is larger ({}) than the hard limit of 5MB, the probability is very high that something is wrong with the playout. Check this on the server with `less {log_path}`.", sizeof_fmt(file_size))
|
||||||
|
} else {
|
||||||
|
fs::read_to_string(log_path)?
|
||||||
|
};
|
||||||
|
|
||||||
|
return Ok(file_content);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(ServiceError::NoContent(
|
||||||
|
"Requested log file not exists, or not readable.".to_string(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// get human readable file size
|
||||||
|
pub fn sizeof_fmt(mut num: f64) -> String {
|
||||||
|
let suffix = 'B';
|
||||||
|
|
||||||
|
for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"] {
|
||||||
|
if num.abs() < 1024.0 {
|
||||||
|
return format!("{num:.1}{unit}{suffix}");
|
||||||
|
}
|
||||||
|
num /= 1024.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
format!("{num:.1}Yi{suffix}")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn local_utc_offset() -> i32 {
|
||||||
|
let mut offset = Local::now().format("%:z").to_string();
|
||||||
|
let operator = offset.remove(0);
|
||||||
|
let mut utc_offset = 0;
|
||||||
|
|
||||||
|
if let Some((r, f)) = offset.split_once(':') {
|
||||||
|
utc_offset = r.parse::<i32>().unwrap_or(0) * 60 + f.parse::<i32>().unwrap_or(0);
|
||||||
|
|
||||||
|
if operator == '-' && utc_offset > 0 {
|
||||||
|
utc_offset = -utc_offset;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
utc_offset
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn naive_date_time_from_str<'de, D>(deserializer: D) -> Result<NaiveDateTime, D::Error>
|
||||||
|
where
|
||||||
|
D: Deserializer<'de>,
|
||||||
|
{
|
||||||
|
let s: String = Deserialize::deserialize(deserializer)?;
|
||||||
|
|
||||||
|
match NaiveDateTime::parse_from_str(&s, "%Y-%m-%dT%H:%M:%S") {
|
||||||
|
Ok(date_time) => Ok(date_time),
|
||||||
|
Err(e) => {
|
||||||
|
if e.kind() == ParseErrorKind::TooShort {
|
||||||
|
NaiveDateTime::parse_from_str(&format!("{s}T00:00:00"), "%Y-%m-%dT%H:%M:%S")
|
||||||
|
.map_err(de::Error::custom)
|
||||||
|
} else {
|
||||||
|
NaiveDateTime::parse_from_str(&s, "%Y-%m-%dT%H:%M:%S%#z").map_err(de::Error::custom)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -1,21 +1,22 @@
|
|||||||
use std::{fs, path::PathBuf};
|
use std::{fs, path::PathBuf};
|
||||||
|
|
||||||
use log::*;
|
use simplelog::*;
|
||||||
|
use sqlx::{Pool, Sqlite};
|
||||||
|
|
||||||
use crate::player::controller::ChannelManager;
|
use crate::utils::{errors::ServiceError, files::norm_abs_path, playout_config};
|
||||||
use crate::player::utils::{json_reader, json_writer, JsonPlaylist};
|
use ffplayout_lib::utils::{
|
||||||
use crate::utils::{
|
generate_playlist as playlist_generator, json_reader, json_writer, JsonPlaylist, PlayoutConfig,
|
||||||
config::PlayoutConfig, errors::ServiceError, files::norm_abs_path,
|
|
||||||
generator::playlist_generator,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
pub async fn read_playlist(
|
pub async fn read_playlist(
|
||||||
config: &PlayoutConfig,
|
conn: &Pool<Sqlite>,
|
||||||
|
id: i32,
|
||||||
date: String,
|
date: String,
|
||||||
) -> Result<JsonPlaylist, ServiceError> {
|
) -> Result<JsonPlaylist, ServiceError> {
|
||||||
|
let (config, _) = playout_config(conn, &id).await?;
|
||||||
|
let (path, _, _) = norm_abs_path(&config.playlist.path, "")?;
|
||||||
|
let mut playlist_path = path;
|
||||||
let d: Vec<&str> = date.split('-').collect();
|
let d: Vec<&str> = date.split('-').collect();
|
||||||
let mut playlist_path = config.channel.playlists.clone();
|
|
||||||
|
|
||||||
playlist_path = playlist_path
|
playlist_path = playlist_path
|
||||||
.join(d[0])
|
.join(d[0])
|
||||||
.join(d[1])
|
.join(d[1])
|
||||||
@ -29,12 +30,14 @@ pub async fn read_playlist(
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub async fn write_playlist(
|
pub async fn write_playlist(
|
||||||
config: &PlayoutConfig,
|
conn: &Pool<Sqlite>,
|
||||||
|
id: i32,
|
||||||
json_data: JsonPlaylist,
|
json_data: JsonPlaylist,
|
||||||
) -> Result<String, ServiceError> {
|
) -> Result<String, ServiceError> {
|
||||||
|
let (config, _) = playout_config(conn, &id).await?;
|
||||||
let date = json_data.date.clone();
|
let date = json_data.date.clone();
|
||||||
|
let mut playlist_path = config.playlist.path;
|
||||||
let d: Vec<&str> = date.split('-').collect();
|
let d: Vec<&str> = date.split('-').collect();
|
||||||
let mut playlist_path = config.channel.playlists.clone();
|
|
||||||
|
|
||||||
if !playlist_path
|
if !playlist_path
|
||||||
.extension()
|
.extension()
|
||||||
@ -84,16 +87,17 @@ pub async fn write_playlist(
|
|||||||
Err(ServiceError::InternalServerError)
|
Err(ServiceError::InternalServerError)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn generate_playlist(manager: ChannelManager) -> Result<JsonPlaylist, ServiceError> {
|
pub async fn generate_playlist(
|
||||||
let mut config = manager.config.lock().unwrap();
|
mut config: PlayoutConfig,
|
||||||
|
channel: String,
|
||||||
|
) -> Result<JsonPlaylist, ServiceError> {
|
||||||
if let Some(mut template) = config.general.template.take() {
|
if let Some(mut template) = config.general.template.take() {
|
||||||
for source in template.sources.iter_mut() {
|
for source in template.sources.iter_mut() {
|
||||||
let mut paths = vec![];
|
let mut paths = vec![];
|
||||||
|
|
||||||
for path in &source.paths {
|
for path in &source.paths {
|
||||||
let (safe_path, _, _) =
|
let (safe_path, _, _) =
|
||||||
norm_abs_path(&config.channel.storage, &path.to_string_lossy())?;
|
norm_abs_path(&config.storage.path, &path.to_string_lossy())?;
|
||||||
paths.push(safe_path);
|
paths.push(safe_path);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -103,9 +107,7 @@ pub fn generate_playlist(manager: ChannelManager) -> Result<JsonPlaylist, Servic
|
|||||||
config.general.template = Some(template);
|
config.general.template = Some(template);
|
||||||
}
|
}
|
||||||
|
|
||||||
drop(config);
|
match playlist_generator(&config, Some(channel)) {
|
||||||
|
|
||||||
match playlist_generator(&manager) {
|
|
||||||
Ok(playlists) => {
|
Ok(playlists) => {
|
||||||
if !playlists.is_empty() {
|
if !playlists.is_empty() {
|
||||||
Ok(playlists[0].clone())
|
Ok(playlists[0].clone())
|
||||||
@ -122,10 +124,14 @@ pub fn generate_playlist(manager: ChannelManager) -> Result<JsonPlaylist, Servic
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn delete_playlist(config: &PlayoutConfig, date: &str) -> Result<String, ServiceError> {
|
pub async fn delete_playlist(
|
||||||
|
conn: &Pool<Sqlite>,
|
||||||
|
id: i32,
|
||||||
|
date: &str,
|
||||||
|
) -> Result<String, ServiceError> {
|
||||||
|
let (config, _) = playout_config(conn, &id).await?;
|
||||||
|
let mut playlist_path = PathBuf::from(&config.playlist.path);
|
||||||
let d: Vec<&str> = date.split('-').collect();
|
let d: Vec<&str> = date.split('-').collect();
|
||||||
let mut playlist_path = PathBuf::from(&config.channel.playlists);
|
|
||||||
|
|
||||||
playlist_path = playlist_path
|
playlist_path = playlist_path
|
||||||
.join(d[0])
|
.join(d[0])
|
||||||
.join(d[1])
|
.join(d[1])
|
@ -4,8 +4,8 @@ use local_ip_address::list_afinet_netifas;
|
|||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
use sysinfo::System;
|
use sysinfo::System;
|
||||||
|
|
||||||
use crate::utils::config::PlayoutConfig;
|
|
||||||
use crate::{DISKS, NETWORKS, SYS};
|
use crate::{DISKS, NETWORKS, SYS};
|
||||||
|
use ffplayout_lib::utils::PlayoutConfig;
|
||||||
|
|
||||||
const IGNORE_INTERFACES: [&str; 7] = ["docker", "lxdbr", "tab", "tun", "virbr", "veth", "vnet"];
|
const IGNORE_INTERFACES: [&str; 7] = ["docker", "lxdbr", "tab", "tun", "virbr", "veth", "vnet"];
|
||||||
|
|
||||||
@ -118,7 +118,7 @@ pub fn stat(config: PlayoutConfig) -> SystemStat {
|
|||||||
|
|
||||||
for disk in &*disks {
|
for disk in &*disks {
|
||||||
if disk.mount_point().to_string_lossy().len() > 1
|
if disk.mount_point().to_string_lossy().len() > 1
|
||||||
&& config.channel.storage.starts_with(disk.mount_point())
|
&& config.storage.path.starts_with(disk.mount_point())
|
||||||
{
|
{
|
||||||
storage.path = disk.name().to_string_lossy().to_string();
|
storage.path = disk.name().to_string_lossy().to_string();
|
||||||
storage.total = disk.total_space();
|
storage.total = disk.total_space();
|
@ -1,76 +1,36 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "ffplayout"
|
name = "ffplayout"
|
||||||
description.workspace = true
|
description = "24/7 playout based on rust and ffmpeg"
|
||||||
readme.workspace = true
|
readme = "README.md"
|
||||||
version.workspace = true
|
version.workspace = true
|
||||||
license.workspace = true
|
license.workspace = true
|
||||||
authors.workspace = true
|
authors.workspace = true
|
||||||
repository.workspace = true
|
repository.workspace = true
|
||||||
edition.workspace = true
|
edition.workspace = true
|
||||||
|
|
||||||
[features]
|
default-run = "ffplayout"
|
||||||
default = ["embed_frontend"]
|
|
||||||
embed_frontend = []
|
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
actix-files = "0.6"
|
ffplayout-lib = { path = "../lib" }
|
||||||
actix-multipart = "0.7"
|
chrono = { version = "0.4", default-features = false, features = ["clock", "std"] }
|
||||||
actix-web = "4"
|
clap = { version = "4.3", features = ["derive"] }
|
||||||
actix-web-grants = "4"
|
crossbeam-channel = "0.5"
|
||||||
actix-web-httpauth = "0.8"
|
futures = "0.3"
|
||||||
actix-web-lab = "0.23"
|
itertools = "0.12"
|
||||||
actix-web-static-files = "4.0"
|
notify = "6.0"
|
||||||
argon2 = "0.5"
|
|
||||||
chrono = { version = "0.4", default-features = false, features = ["clock", "std", "serde"] }
|
|
||||||
clap = { version = "4.3", features = ["derive", "env"] }
|
|
||||||
derive_more = { version = "1", features = ["display"] }
|
|
||||||
faccess = "0.2"
|
|
||||||
ffprobe = "0.4"
|
|
||||||
flexi_logger = { version = "0.29", features = ["kv", "colors"] }
|
|
||||||
futures-util = { version = "0.3", default-features = false, features = ["std"] }
|
|
||||||
jsonwebtoken = "9"
|
|
||||||
lazy_static = "1.4"
|
|
||||||
lettre = { version = "0.11", features = ["builder", "rustls-tls", "smtp-transport", "tokio1", "tokio1-rustls-tls"], default-features = false }
|
|
||||||
lexical-sort = "0.3"
|
|
||||||
local-ip-address = "0.6"
|
|
||||||
log = { version = "0.4", features = ["std", "serde", "kv", "kv_std", "kv_sval", "kv_serde"] }
|
|
||||||
m3u8-rs = "6"
|
|
||||||
nix = { version = "0.29", features = ["user", "fs"] }
|
|
||||||
notify = "7.0"
|
|
||||||
notify-debouncer-full = { version = "*", default-features = false }
|
notify-debouncer-full = { version = "*", default-features = false }
|
||||||
num-traits = "0.2"
|
|
||||||
once_cell = "1"
|
|
||||||
paris = "1.5"
|
|
||||||
parking_lot = "0.12"
|
|
||||||
path-clean = "1.0"
|
|
||||||
rand = "0.8"
|
rand = "0.8"
|
||||||
regex = "1"
|
regex = "1"
|
||||||
relative-path = "1.8"
|
|
||||||
reqwest = { version = "0.12", default-features = false, features = ["blocking", "json", "rustls-tls"] }
|
reqwest = { version = "0.12", default-features = false, features = ["blocking", "json", "rustls-tls"] }
|
||||||
rpassword = "7.2"
|
|
||||||
sanitize-filename = "0.5"
|
|
||||||
serde = { version = "1.0", features = ["derive"] }
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
serde_json = "1.0"
|
serde_json = "1.0"
|
||||||
serde_with = "3.8"
|
simplelog = { version = "0.12", features = ["paris"] }
|
||||||
shlex = "1.1"
|
tiny_http = { version = "0.12", default-features = false }
|
||||||
static-files = "0.2"
|
zeromq = { version = "0.3", default-features = false, features = [
|
||||||
sysinfo ={ version = "0.32", features = ["linux-netdevs", "linux-tmpfs"] }
|
"async-std-runtime",
|
||||||
sqlx = { version = "0.8", features = ["runtime-tokio", "sqlite"] }
|
|
||||||
time = { version = "0.3", features = ["formatting", "macros"] }
|
|
||||||
tokio = { version = "1.29", features = ["full"] }
|
|
||||||
tokio-stream = "0.1"
|
|
||||||
toml_edit = {version = "0.22", features = ["serde"]}
|
|
||||||
ts-rs = { version = "10", features = ["chrono-impl", "no-serde-warnings"] }
|
|
||||||
uuid = "1.8"
|
|
||||||
walkdir = "2"
|
|
||||||
zeromq = { version = "0.4", default-features = false, features = [
|
|
||||||
"tokio-runtime",
|
|
||||||
"tcp-transport",
|
"tcp-transport",
|
||||||
] }
|
] }
|
||||||
|
|
||||||
[build-dependencies]
|
|
||||||
static-files = "0.2"
|
|
||||||
|
|
||||||
[[bin]]
|
[[bin]]
|
||||||
name = "ffplayout"
|
name = "ffplayout"
|
||||||
path = "src/main.rs"
|
path = "src/main.rs"
|
||||||
@ -82,32 +42,49 @@ priority = "optional"
|
|||||||
section = "net"
|
section = "net"
|
||||||
license-file = ["../LICENSE", "0"]
|
license-file = ["../LICENSE", "0"]
|
||||||
depends = ""
|
depends = ""
|
||||||
|
recommends = "sudo"
|
||||||
suggests = "ffmpeg"
|
suggests = "ffmpeg"
|
||||||
copyright = "Copyright (c) 2024, Jonathan Baecker. All rights reserved."
|
copyright = "Copyright (c) 2022, Jonathan Baecker. All rights reserved."
|
||||||
|
conf-files = ["/etc/ffplayout/ffplayout.toml", "/etc/ffplayout/advanced.toml"]
|
||||||
assets = [
|
assets = [
|
||||||
|
[
|
||||||
|
"../target/x86_64-unknown-linux-musl/release/ffpapi",
|
||||||
|
"/usr/bin/",
|
||||||
|
"755",
|
||||||
|
],
|
||||||
[
|
[
|
||||||
"../target/x86_64-unknown-linux-musl/release/ffplayout",
|
"../target/x86_64-unknown-linux-musl/release/ffplayout",
|
||||||
"/usr/bin/",
|
"/usr/bin/",
|
||||||
"755",
|
"755",
|
||||||
],
|
],
|
||||||
|
[
|
||||||
|
"../assets/ffpapi.service",
|
||||||
|
"/lib/systemd/system/",
|
||||||
|
"644",
|
||||||
|
],
|
||||||
[
|
[
|
||||||
"../assets/ffplayout.service",
|
"../assets/ffplayout.service",
|
||||||
"/lib/systemd/system/",
|
"/lib/systemd/system/",
|
||||||
"644",
|
"644",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"../assets/dummy.vtt",
|
"../assets/ffplayout@.service",
|
||||||
"/usr/share/ffplayout/",
|
"/lib/systemd/system/",
|
||||||
"644",
|
"644",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"../assets/DejaVuSans.ttf",
|
"../assets/11-ffplayout",
|
||||||
"/usr/share/ffplayout/",
|
"/etc/sudoers.d/",
|
||||||
"644",
|
"644",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"../assets/FONT_LICENSE.txt",
|
"../assets/advanced.toml",
|
||||||
"/usr/share/ffplayout/",
|
"/etc/ffplayout/",
|
||||||
|
"644",
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"../assets/ffplayout.toml",
|
||||||
|
"/etc/ffplayout/",
|
||||||
"644",
|
"644",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
@ -115,6 +92,11 @@ assets = [
|
|||||||
"/usr/share/ffplayout/",
|
"/usr/share/ffplayout/",
|
||||||
"644",
|
"644",
|
||||||
],
|
],
|
||||||
|
[
|
||||||
|
"../assets/ffplayout.toml",
|
||||||
|
"/usr/share/ffplayout/ffplayout.toml.orig",
|
||||||
|
"644",
|
||||||
|
],
|
||||||
[
|
[
|
||||||
"../assets/ffplayout.conf",
|
"../assets/ffplayout.conf",
|
||||||
"/usr/share/ffplayout/ffplayout.conf.example",
|
"/usr/share/ffplayout/ffplayout.conf.example",
|
||||||
@ -125,6 +107,11 @@ assets = [
|
|||||||
"/usr/share/doc/ffplayout/README",
|
"/usr/share/doc/ffplayout/README",
|
||||||
"644",
|
"644",
|
||||||
],
|
],
|
||||||
|
[
|
||||||
|
"../assets/ffpapi.1.gz",
|
||||||
|
"/usr/share/man/man1/",
|
||||||
|
"644",
|
||||||
|
],
|
||||||
[
|
[
|
||||||
"../assets/ffplayout.1.gz",
|
"../assets/ffplayout.1.gz",
|
||||||
"/usr/share/man/man1/",
|
"/usr/share/man/man1/",
|
||||||
@ -132,33 +119,48 @@ assets = [
|
|||||||
],
|
],
|
||||||
]
|
]
|
||||||
maintainer-scripts = "../debian/"
|
maintainer-scripts = "../debian/"
|
||||||
systemd-units = { enable = true, unit-scripts = "../assets" }
|
systemd-units = { enable = false, unit-scripts = "../assets" }
|
||||||
|
|
||||||
[package.metadata.deb.variants.arm64]
|
[package.metadata.deb.variants.arm64]
|
||||||
assets = [
|
assets = [
|
||||||
|
[
|
||||||
|
"../target/aarch64-unknown-linux-gnu/release/ffpapi",
|
||||||
|
"/usr/bin/",
|
||||||
|
"755",
|
||||||
|
],
|
||||||
[
|
[
|
||||||
"../target/aarch64-unknown-linux-gnu/release/ffplayout",
|
"../target/aarch64-unknown-linux-gnu/release/ffplayout",
|
||||||
"/usr/bin/",
|
"/usr/bin/",
|
||||||
"755",
|
"755",
|
||||||
],
|
],
|
||||||
|
[
|
||||||
|
"../assets/ffpapi.service",
|
||||||
|
"/lib/systemd/system/",
|
||||||
|
"644",
|
||||||
|
],
|
||||||
[
|
[
|
||||||
"../assets/ffplayout.service",
|
"../assets/ffplayout.service",
|
||||||
"/lib/systemd/system/",
|
"/lib/systemd/system/",
|
||||||
"644",
|
"644",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"../assets/dummy.vtt",
|
"../assets/ffplayout@.service",
|
||||||
"/usr/share/ffplayout/",
|
"/lib/systemd/system/",
|
||||||
"644",
|
"644",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"../assets/DejaVuSans.ttf",
|
"../assets/11-ffplayout",
|
||||||
"/usr/share/ffplayout/",
|
"/etc/sudoers.d/",
|
||||||
"644",
|
"644",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
"../assets/FONT_LICENSE.txt",
|
"../assets/ffplayout.toml",
|
||||||
"/usr/share/ffplayout/",
|
"/etc/ffplayout/",
|
||||||
|
"644",
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"../assets/advanced.toml",
|
||||||
|
"/etc/ffplayout/",
|
||||||
"644",
|
"644",
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
@ -166,6 +168,11 @@ assets = [
|
|||||||
"/usr/share/ffplayout/",
|
"/usr/share/ffplayout/",
|
||||||
"644",
|
"644",
|
||||||
],
|
],
|
||||||
|
[
|
||||||
|
"../assets/ffplayout.toml",
|
||||||
|
"/usr/share/ffplayout/ffplayout.toml.orig",
|
||||||
|
"644",
|
||||||
|
],
|
||||||
[
|
[
|
||||||
"../assets/ffplayout.conf",
|
"../assets/ffplayout.conf",
|
||||||
"/usr/share/ffplayout/ffplayout.conf.example",
|
"/usr/share/ffplayout/ffplayout.conf.example",
|
||||||
@ -176,6 +183,11 @@ assets = [
|
|||||||
"/usr/share/doc/ffplayout/README",
|
"/usr/share/doc/ffplayout/README",
|
||||||
"644",
|
"644",
|
||||||
],
|
],
|
||||||
|
[
|
||||||
|
"../assets/ffpapi.1.gz",
|
||||||
|
"/usr/share/man/man1/",
|
||||||
|
"644",
|
||||||
|
],
|
||||||
[
|
[
|
||||||
"../assets/ffplayout.1.gz",
|
"../assets/ffplayout.1.gz",
|
||||||
"/usr/share/man/man1/",
|
"/usr/share/man/man1/",
|
||||||
@ -183,20 +195,25 @@ assets = [
|
|||||||
],
|
],
|
||||||
]
|
]
|
||||||
|
|
||||||
# RHEL RPM PACKAGE
|
# REHL RPM PACKAGE
|
||||||
[package.metadata.generate-rpm]
|
[package.metadata.generate-rpm]
|
||||||
name = "ffplayout"
|
name = "ffplayout"
|
||||||
license = "GPL-3.0"
|
license = "GPL-3.0"
|
||||||
assets = [
|
assets = [
|
||||||
|
{ source = "../target/x86_64-unknown-linux-musl/release/ffpapi", dest = "/usr/bin/ffpapi", mode = "755" },
|
||||||
{ source = "../target/x86_64-unknown-linux-musl/release/ffplayout", dest = "/usr/bin/ffplayout", mode = "755" },
|
{ source = "../target/x86_64-unknown-linux-musl/release/ffplayout", dest = "/usr/bin/ffplayout", mode = "755" },
|
||||||
|
{ source = "../assets/advanced.toml", dest = "/etc/ffplayout/advanced.toml", mode = "644", config = true },
|
||||||
|
{ source = "../assets/ffplayout.toml", dest = "/etc/ffplayout/ffplayout.toml", mode = "644", config = true },
|
||||||
|
{ source = "../assets/ffpapi.service", dest = "/lib/systemd/system/ffpapi.service", mode = "644" },
|
||||||
{ source = "../assets/ffplayout.service", dest = "/lib/systemd/system/ffplayout.service", mode = "644" },
|
{ source = "../assets/ffplayout.service", dest = "/lib/systemd/system/ffplayout.service", mode = "644" },
|
||||||
|
{ source = "../assets/ffplayout@.service", dest = "/lib/systemd/system/ffplayout@.service", mode = "644" },
|
||||||
|
{ source = "../assets/11-ffplayout", dest = "/etc/sudoers.d/11-ffplayout", mode = "644" },
|
||||||
{ source = "../README.md", dest = "/usr/share/doc/ffplayout/README", mode = "644" },
|
{ source = "../README.md", dest = "/usr/share/doc/ffplayout/README", mode = "644" },
|
||||||
|
{ source = "../assets/ffpapi.1.gz", dest = "/usr/share/man/man1/ffpapi.1.gz", mode = "644", doc = true },
|
||||||
{ source = "../assets/ffplayout.1.gz", dest = "/usr/share/man/man1/ffplayout.1.gz", mode = "644", doc = true },
|
{ source = "../assets/ffplayout.1.gz", dest = "/usr/share/man/man1/ffplayout.1.gz", mode = "644", doc = true },
|
||||||
{ source = "../LICENSE", dest = "/usr/share/doc/ffplayout/LICENSE", mode = "644" },
|
{ source = "../LICENSE", dest = "/usr/share/doc/ffplayout/LICENSE", mode = "644" },
|
||||||
{ source = "../assets/dummy.vtt", dest = "/usr/share/ffplayout/dummy.vtt", mode = "644" },
|
|
||||||
{ source = "../assets/DejaVuSans.ttf", dest = "/usr/share/ffplayout/DejaVuSans.ttf", mode = "644" },
|
|
||||||
{ source = "../assets/FONT_LICENSE.txt", dest = "/usr/share/ffplayout/FONT_LICENSE.txt", mode = "644" },
|
|
||||||
{ source = "../assets/logo.png", dest = "/usr/share/ffplayout/logo.png", mode = "644" },
|
{ source = "../assets/logo.png", dest = "/usr/share/ffplayout/logo.png", mode = "644" },
|
||||||
|
{ source = "../assets/ffplayout.toml", dest = "/usr/share/ffplayout/ffplayout.toml.orig", mode = "644" },
|
||||||
{ source = "../assets/ffplayout.conf", dest = "/usr/share/ffplayout/ffplayout.conf.example", mode = "644" },
|
{ source = "../assets/ffplayout.conf", dest = "/usr/share/ffplayout/ffplayout.conf.example", mode = "644" },
|
||||||
{ source = "../debian/postinst", dest = "/usr/share/ffplayout/postinst", mode = "755" },
|
{ source = "../debian/postinst", dest = "/usr/share/ffplayout/postinst", mode = "755" },
|
||||||
]
|
]
|
34
ffplayout-engine/README.md
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
**ffplayout-engine**
|
||||||
|
================
|
||||||
|
|
||||||
|
Start with Arguments
|
||||||
|
-----
|
||||||
|
|
||||||
|
ffplayout also allows the passing of parameters:
|
||||||
|
|
||||||
|
```
|
||||||
|
OPTIONS:
|
||||||
|
-c, --config <CONFIG> File path to ffplayout.yml
|
||||||
|
-d, --date <DATE> Target date (YYYY-MM-DD) for text/m3u to playlist import
|
||||||
|
-f, --folder <FOLDER> Play folder content
|
||||||
|
--fake-time <FAKE_TIME> fake date time, for debugging
|
||||||
|
-g, --generate <YYYY-MM-DD>... Generate playlist for dates, like: 2022-01-01 - 2022-01-10
|
||||||
|
-h, --help Print help information
|
||||||
|
-i, --infinit Loop playlist infinitely
|
||||||
|
--import <IMPORT> Import a given text/m3u file and create a playlist from it
|
||||||
|
-l, --log <LOG> File path for logging
|
||||||
|
-m, --play-mode <PLAY_MODE> Playing mode: folder, playlist
|
||||||
|
-o, --output <OUTPUT> Set output mode: desktop, hls, stream
|
||||||
|
-p, --playlist <PLAYLIST> Path from playlist
|
||||||
|
-s, --start <START> Start time in 'hh:mm:ss', 'now' for start with first
|
||||||
|
-t, --length <LENGTH> Set length in 'hh:mm:ss', 'none' for no length check
|
||||||
|
-v, --volume <VOLUME> Set audio volume
|
||||||
|
-V, --version Print version information
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
You can run the command like:
|
||||||
|
|
||||||
|
```Bash
|
||||||
|
./ffplayout -l none -p ~/playlist.json -o desktop
|
||||||
|
```
|
@ -9,16 +9,15 @@ use std::{
|
|||||||
time::Duration,
|
time::Duration,
|
||||||
};
|
};
|
||||||
|
|
||||||
use log::*;
|
|
||||||
use notify::{
|
use notify::{
|
||||||
event::{CreateKind, ModifyKind, RemoveKind, RenameMode},
|
event::{CreateKind, ModifyKind, RemoveKind, RenameMode},
|
||||||
EventKind::{Create, Modify, Remove},
|
EventKind::{Create, Modify, Remove},
|
||||||
RecursiveMode,
|
RecursiveMode, Watcher,
|
||||||
};
|
};
|
||||||
use notify_debouncer_full::new_debouncer;
|
use notify_debouncer_full::new_debouncer;
|
||||||
|
use simplelog::*;
|
||||||
|
|
||||||
use crate::player::utils::{include_file_extension, Media};
|
use ffplayout_lib::utils::{include_file_extension, Media, PlayoutConfig};
|
||||||
use crate::utils::{config::PlayoutConfig, logging::Target};
|
|
||||||
|
|
||||||
/// Create a watcher, which monitor file changes.
|
/// Create a watcher, which monitor file changes.
|
||||||
/// When a change is register, update the current file list.
|
/// When a change is register, update the current file list.
|
||||||
@ -28,11 +27,10 @@ pub fn watchman(
|
|||||||
is_terminated: Arc<AtomicBool>,
|
is_terminated: Arc<AtomicBool>,
|
||||||
sources: Arc<Mutex<Vec<Media>>>,
|
sources: Arc<Mutex<Vec<Media>>>,
|
||||||
) {
|
) {
|
||||||
let id = config.general.channel_id;
|
let path = Path::new(&config.storage.path);
|
||||||
let path = Path::new(&config.channel.storage);
|
|
||||||
|
|
||||||
if !path.exists() {
|
if !path.exists() {
|
||||||
error!(target: Target::file_mail(), channel = id; "Folder path not exists: '{path:?}'");
|
error!("Folder path not exists: '{path:?}'");
|
||||||
panic!("Folder path not exists: '{path:?}'");
|
panic!("Folder path not exists: '{path:?}'");
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -41,7 +39,11 @@ pub fn watchman(
|
|||||||
|
|
||||||
let mut debouncer = new_debouncer(Duration::from_secs(1), None, tx).unwrap();
|
let mut debouncer = new_debouncer(Duration::from_secs(1), None, tx).unwrap();
|
||||||
|
|
||||||
debouncer.watch(path, RecursiveMode::Recursive).unwrap();
|
debouncer
|
||||||
|
.watcher()
|
||||||
|
.watch(path, RecursiveMode::Recursive)
|
||||||
|
.unwrap();
|
||||||
|
debouncer.cache().add_root(path, RecursiveMode::Recursive);
|
||||||
|
|
||||||
while !is_terminated.load(Ordering::SeqCst) {
|
while !is_terminated.load(Ordering::SeqCst) {
|
||||||
if let Ok(result) = rx.try_recv() {
|
if let Ok(result) = rx.try_recv() {
|
||||||
@ -55,7 +57,7 @@ pub fn watchman(
|
|||||||
let media = Media::new(index, &new_path.to_string_lossy(), false);
|
let media = Media::new(index, &new_path.to_string_lossy(), false);
|
||||||
|
|
||||||
sources.lock().unwrap().push(media);
|
sources.lock().unwrap().push(media);
|
||||||
info!(target: Target::file_mail(), channel = id; "Create new file: <b><magenta>{new_path:?}</></b>");
|
info!("Create new file: <b><magenta>{new_path:?}</></b>");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Remove(RemoveKind::File) | Modify(ModifyKind::Name(RenameMode::From)) => {
|
Remove(RemoveKind::File) | Modify(ModifyKind::Name(RenameMode::From)) => {
|
||||||
@ -66,7 +68,7 @@ pub fn watchman(
|
|||||||
.lock()
|
.lock()
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.retain(|x| x.source != old_path.to_string_lossy());
|
.retain(|x| x.source != old_path.to_string_lossy());
|
||||||
info!(target: Target::file_mail(), channel = id; "Remove file: <b><magenta>{old_path:?}</></b>");
|
info!("Remove file: <b><magenta>{old_path:?}</></b>");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Modify(ModifyKind::Name(RenameMode::Both)) => {
|
Modify(ModifyKind::Name(RenameMode::Both)) => {
|
||||||
@ -80,18 +82,18 @@ pub fn watchman(
|
|||||||
.position(|x| *x.source == old_path.display().to_string()) {
|
.position(|x| *x.source == old_path.display().to_string()) {
|
||||||
let media = Media::new(index, &new_path.to_string_lossy(), false);
|
let media = Media::new(index, &new_path.to_string_lossy(), false);
|
||||||
media_list[index] = media;
|
media_list[index] = media;
|
||||||
info!(target: Target::file_mail(), channel = id; "Move file: <b><magenta>{old_path:?}</></b> to <b><magenta>{new_path:?}</></b>");
|
info!("Move file: <b><magenta>{old_path:?}</></b> to <b><magenta>{new_path:?}</></b>");
|
||||||
} else if include_file_extension(&config, new_path) {
|
} else if include_file_extension(&config, new_path) {
|
||||||
let index = media_list.len();
|
let index = media_list.len();
|
||||||
let media = Media::new(index, &new_path.to_string_lossy(), false);
|
let media = Media::new(index, &new_path.to_string_lossy(), false);
|
||||||
|
|
||||||
media_list.push(media);
|
media_list.push(media);
|
||||||
info!(target: Target::file_mail(), channel = id; "Create new file: <b><magenta>{new_path:?}</></b>");
|
info!("Create new file: <b><magenta>{new_path:?}</></b>");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => trace!(target: Target::file_mail(), channel = id; "Not tracked file event: {event:?}")
|
_ => debug!("Not tracked file event: {event:?}")
|
||||||
}),
|
}),
|
||||||
Err(errors) => errors.iter().for_each(|error| error!(target: Target::file_mail(), channel = id; "{error:?}")),
|
Err(errors) => errors.iter().for_each(|error| error!("{error:?}")),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
167
ffplayout-engine/src/input/ingest.rs
Normal file
@ -0,0 +1,167 @@
|
|||||||
|
use std::{
|
||||||
|
io::{BufRead, BufReader, Error, Read},
|
||||||
|
process::{exit, ChildStderr, Command, Stdio},
|
||||||
|
sync::atomic::Ordering,
|
||||||
|
thread,
|
||||||
|
};
|
||||||
|
|
||||||
|
use crossbeam_channel::Sender;
|
||||||
|
use simplelog::*;
|
||||||
|
|
||||||
|
use crate::utils::{log_line, valid_stream};
|
||||||
|
use ffplayout_lib::{
|
||||||
|
utils::{
|
||||||
|
controller::ProcessUnit::*, test_tcp_port, Media, PlayoutConfig, ProcessControl,
|
||||||
|
FFMPEG_IGNORE_ERRORS, FFMPEG_UNRECOVERABLE_ERRORS,
|
||||||
|
},
|
||||||
|
vec_strings,
|
||||||
|
};
|
||||||
|
|
||||||
|
fn server_monitor(
|
||||||
|
level: &str,
|
||||||
|
ignore: Vec<String>,
|
||||||
|
buffer: BufReader<ChildStderr>,
|
||||||
|
proc_ctl: ProcessControl,
|
||||||
|
) -> Result<(), Error> {
|
||||||
|
for line in buffer.lines() {
|
||||||
|
let line = line?;
|
||||||
|
|
||||||
|
if !FFMPEG_IGNORE_ERRORS.iter().any(|i| line.contains(*i))
|
||||||
|
&& !ignore.iter().any(|i| line.contains(i))
|
||||||
|
{
|
||||||
|
log_line(&line, level);
|
||||||
|
}
|
||||||
|
|
||||||
|
if line.contains("rtmp") && line.contains("Unexpected stream") && !valid_stream(&line) {
|
||||||
|
if let Err(e) = proc_ctl.stop(Ingest) {
|
||||||
|
error!("{e}");
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if FFMPEG_UNRECOVERABLE_ERRORS
|
||||||
|
.iter()
|
||||||
|
.any(|i| line.contains(*i))
|
||||||
|
{
|
||||||
|
proc_ctl.stop_all();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// ffmpeg Ingest Server
|
||||||
|
///
|
||||||
|
/// Start ffmpeg in listen mode, and wait for input.
|
||||||
|
pub fn ingest_server(
|
||||||
|
config: PlayoutConfig,
|
||||||
|
ingest_sender: Sender<(usize, [u8; 65088])>,
|
||||||
|
proc_control: ProcessControl,
|
||||||
|
) -> Result<(), Error> {
|
||||||
|
let mut buffer: [u8; 65088] = [0; 65088];
|
||||||
|
let mut server_cmd = vec_strings!["-hide_banner", "-nostats", "-v", "level+info"];
|
||||||
|
let stream_input = config.ingest.input_cmd.clone().unwrap();
|
||||||
|
let mut dummy_media = Media::new(0, "Live Stream", false);
|
||||||
|
dummy_media.unit = Ingest;
|
||||||
|
dummy_media.add_filter(&config, &None);
|
||||||
|
|
||||||
|
if let Some(ingest_input_cmd) = config
|
||||||
|
.advanced
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|a| a.ingest.input_cmd.clone())
|
||||||
|
{
|
||||||
|
server_cmd.append(&mut ingest_input_cmd.clone());
|
||||||
|
}
|
||||||
|
|
||||||
|
server_cmd.append(&mut stream_input.clone());
|
||||||
|
|
||||||
|
if let Some(mut filter) = dummy_media.filter {
|
||||||
|
server_cmd.append(&mut filter.cmd());
|
||||||
|
server_cmd.append(&mut filter.map());
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(mut cmd) = config.processing.cmd {
|
||||||
|
server_cmd.append(&mut cmd);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut is_running;
|
||||||
|
|
||||||
|
if let Some(url) = stream_input.iter().find(|s| s.contains("://")) {
|
||||||
|
if !test_tcp_port(url) {
|
||||||
|
proc_control.stop_all();
|
||||||
|
exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
info!("Start ingest server, listening on: <b><magenta>{url}</></b>",);
|
||||||
|
};
|
||||||
|
|
||||||
|
debug!(
|
||||||
|
"Server CMD: <bright-blue>\"ffmpeg {}\"</>",
|
||||||
|
server_cmd.join(" ")
|
||||||
|
);
|
||||||
|
|
||||||
|
while !proc_control.is_terminated.load(Ordering::SeqCst) {
|
||||||
|
let proc_ctl = proc_control.clone();
|
||||||
|
let level = config.logging.ingest_level.clone().unwrap();
|
||||||
|
let ignore = config.logging.ignore_lines.clone();
|
||||||
|
let mut server_proc = match Command::new("ffmpeg")
|
||||||
|
.args(server_cmd.clone())
|
||||||
|
.stdout(Stdio::piped())
|
||||||
|
.stderr(Stdio::piped())
|
||||||
|
.spawn()
|
||||||
|
{
|
||||||
|
Err(e) => {
|
||||||
|
error!("couldn't spawn ingest server: {e}");
|
||||||
|
panic!("couldn't spawn ingest server: {e}")
|
||||||
|
}
|
||||||
|
Ok(proc) => proc,
|
||||||
|
};
|
||||||
|
let mut ingest_reader = BufReader::new(server_proc.stdout.take().unwrap());
|
||||||
|
let server_err = BufReader::new(server_proc.stderr.take().unwrap());
|
||||||
|
let error_reader_thread =
|
||||||
|
thread::spawn(move || server_monitor(&level, ignore, server_err, proc_ctl));
|
||||||
|
|
||||||
|
*proc_control.server_term.lock().unwrap() = Some(server_proc);
|
||||||
|
is_running = false;
|
||||||
|
|
||||||
|
loop {
|
||||||
|
let bytes_len = match ingest_reader.read(&mut buffer[..]) {
|
||||||
|
Ok(length) => length,
|
||||||
|
Err(e) => {
|
||||||
|
debug!("Ingest server read {e:?}");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if !is_running {
|
||||||
|
proc_control.server_is_running.store(true, Ordering::SeqCst);
|
||||||
|
is_running = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if bytes_len > 0 {
|
||||||
|
if let Err(e) = ingest_sender.send((bytes_len, buffer)) {
|
||||||
|
error!("Ingest server write error: {e:?}");
|
||||||
|
|
||||||
|
proc_control.is_terminated.store(true, Ordering::SeqCst);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
drop(ingest_reader);
|
||||||
|
proc_control
|
||||||
|
.server_is_running
|
||||||
|
.store(false, Ordering::SeqCst);
|
||||||
|
|
||||||
|
if let Err(e) = proc_control.wait(Ingest) {
|
||||||
|
error!("{e}")
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Err(e) = error_reader_thread.join() {
|
||||||
|
error!("{e:?}");
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
51
ffplayout-engine/src/input/mod.rs
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
use std::{
|
||||||
|
sync::{atomic::AtomicBool, Arc},
|
||||||
|
thread,
|
||||||
|
};
|
||||||
|
|
||||||
|
use simplelog::*;
|
||||||
|
|
||||||
|
use ffplayout_lib::utils::{Media, PlayoutConfig, PlayoutStatus, ProcessMode::*};
|
||||||
|
|
||||||
|
pub mod folder;
|
||||||
|
pub mod ingest;
|
||||||
|
pub mod playlist;
|
||||||
|
|
||||||
|
pub use folder::watchman;
|
||||||
|
pub use ingest::ingest_server;
|
||||||
|
pub use playlist::CurrentProgram;
|
||||||
|
|
||||||
|
use ffplayout_lib::utils::{controller::PlayerControl, folder::FolderSource};
|
||||||
|
|
||||||
|
/// Create a source iterator from playlist, or from folder.
|
||||||
|
pub fn source_generator(
|
||||||
|
config: PlayoutConfig,
|
||||||
|
player_control: &PlayerControl,
|
||||||
|
playout_stat: PlayoutStatus,
|
||||||
|
is_terminated: Arc<AtomicBool>,
|
||||||
|
) -> Box<dyn Iterator<Item = Media>> {
|
||||||
|
match config.processing.mode {
|
||||||
|
Folder => {
|
||||||
|
info!("Playout in folder mode");
|
||||||
|
debug!(
|
||||||
|
"Monitor folder: <b><magenta>{:?}</></b>",
|
||||||
|
config.storage.path
|
||||||
|
);
|
||||||
|
|
||||||
|
let config_clone = config.clone();
|
||||||
|
let folder_source = FolderSource::new(&config, playout_stat.chain, player_control);
|
||||||
|
let node_clone = folder_source.player_control.current_list.clone();
|
||||||
|
|
||||||
|
// Spawn a thread to monitor folder for file changes.
|
||||||
|
thread::spawn(move || watchman(config_clone, is_terminated.clone(), node_clone));
|
||||||
|
|
||||||
|
Box::new(folder_source) as Box<dyn Iterator<Item = Media>>
|
||||||
|
}
|
||||||
|
Playlist => {
|
||||||
|
info!("Playout in playlist mode");
|
||||||
|
let program = CurrentProgram::new(&config, playout_stat, is_terminated, player_control);
|
||||||
|
|
||||||
|
Box::new(program) as Box<dyn Iterator<Item = Media>>
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|