169 Commits

Author SHA1 Message Date
meeb
c6acd5378c move TIME_ZONE set by env var from local_settings to settings, resolves #462 2024-02-02 05:51:20 +11:00
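
The commit above reads the container's time zone from an environment variable in the main Django settings module instead of local_settings. A minimal sketch of that pattern, assuming the variable is named TZ with a UTC fallback (both assumptions, not confirmed by the commit):

    import os

    # Read the time zone from the environment at settings import time; the
    # TZ variable name and the UTC default are illustrative assumptions.
    TIME_ZONE = os.getenv('TZ', 'UTC')
    USE_TZ = True
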
meeb
e7788eb8fb Merge pull request #450 from InterN0te/main-delete-files-on-disk
Following Delete files on disk #426
2024-01-17 22:31:09 +11:00
meeb
e4e0b48c0b Merge pull request #460 from skayred/main
Optimized source page and dashboard loading
2024-01-17 22:29:59 +11:00
Dmitrii Savchenko
3573c1187f Optimized source page and dashboard loading 2024-01-16 11:57:00 +02:00
meeb
b11b667aff Merge pull request #452 from ShaneBridges1234/patch-1
Update other-database-backends.md
2024-01-03 16:06:17 +11:00
Yottatron
1b581aa4ba Update other-database-backends.md
Correct table new in SQL for MariaDB column compression.
2024-01-01 09:16:04 -08:00
meeb
7384c00713 fix typo in sponsorblock categories, remove dupe metadata flags, related to #362 2023-12-12 16:28:26 +11:00
meeb
4fdd172b05 tidy up and pass a serialised list through to youtube.download_media, may help with #362 2023-12-12 14:40:23 +11:00
Someone
9c18115032 Merge branch 'meeb:main' into main-delete-files-on-disk 2023-12-11 15:41:50 +01:00
meeb
6853c1fa76 fix tests 2023-12-11 13:42:33 +11:00
administrator
ed07073cf4 Revert "Removed non-pertinent source option"
This reverts commit 46ba2593a2.

Restore option
2023-12-11 03:38:37 +01:00
administrator
af94b37ee6 Revert "Removed non-pertinent source option"
This reverts commit ad1d49a835.

t cherry-pick 46a43b9

Restore option
2023-12-11 03:38:14 +01:00
administrator
ad1d49a835 Removed non-pertinent source option 2023-12-11 03:25:17 +01:00
administrator
46ba2593a2 Removed non-pertinent source option 2023-12-11 03:19:43 +01:00
administrator
46a43b968a Rework delete method to delete all files matching filename
Remove Source folder if checkbox 'remove media' is checked
2023-12-11 02:29:57 +01:00
administrator
805a0eefbd Merge branch 'delete-files-on-disk' of https://github.com/sparklesmcfadden/tubesync into main-delete-files-on-disk
Merge from sparklesmcfadden:delete-files-on-disk
2023-12-11 00:19:37 +01:00
meeb
3a87b5779e Merge pull request #448 from InterN0te/main-addNfoInfo
Added season and episode tags in NFO to get Jellyfin displaying correctly
2023-12-10 17:58:04 +11:00
Someone
f86e72aa92 Optimization of episode calculation 2023-12-09 23:13:28 +01:00
Someone
f550e32b5e Fix secondary sorting on video key 2023-12-09 19:33:59 +01:00
Someone
034d877d6a Add season and episode tags in NFO test 2023-12-09 17:56:32 +01:00
Someone
b9b702ab85 Add season and episode tags in NFO 2023-12-09 17:55:17 +01:00
meeb
c159c24d15 Merge pull request #443 from depuhitv/patch-1
compress sync_media table for mariadb
2023-12-04 18:05:02 +11:00
depuhitv
6c9772d573 fixed grammar 2023-12-04 16:26:46 +11:00
depuhitv
45b8b3f65b compress sync_media table for mariadb
Added steps to compress sync_media table for mariadb.
For 1,608 records, I am seeing the size reduced from 642.8 MB to 55.8 MB
2023-12-04 16:18:10 +11:00
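
The documentation change above adds steps for compressing the sync_media table on MariaDB. As a rough illustration only (the exact statement lives in other-database-backends.md and may differ), the equivalent applied from a Django shell might look like:

    from django.db import connection

    # Illustrative only: switch the sync_media table to a compressed InnoDB
    # row format on MariaDB. Follow the project docs for the supported steps.
    with connection.cursor() as cursor:
        cursor.execute('ALTER TABLE sync_media ROW_FORMAT=COMPRESSED')
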
meeb
7aa9c0ec8a bump to 0.13.3 2023-11-30 18:58:29 +11:00
meeb
e54a762a7b rework skip logic check, prevent race condition between metadata downloading and upload date being checked, resolves #440, #183, related to #438 2023-11-30 18:52:32 +11:00
meeb
512b70adad toggle logging verbosity based on settings.DEBUG 2023-11-30 18:50:22 +11:00
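
A plausible sketch of toggling log verbosity on settings.DEBUG, as described in the commit above; the logger name and the exact levels are assumptions:

    import logging

    from django.conf import settings

    # Chatty logs in development, quieter logs in production.
    log = logging.getLogger('tubesync')
    log.setLevel(logging.DEBUG if settings.DEBUG else logging.INFO)
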
meeb
6c21ff15ab stopcontainer helper 2023-11-30 18:49:58 +11:00
meeb
adf26cb4e3 bump ffmpeg to autobuild-2023-11-29-14-19 2023-11-30 18:49:50 +11:00
meeb
45c12561ba Merge pull request #438 from locke4/main
Fix signals.py mistake
2023-11-29 04:05:13 +11:00
locke4
2d6f485a5d Update signals.py 2023-11-28 08:48:31 +00:00
meeb
33b471175a Merge pull request #425 from locke4/main
Add support for regex video title filtering
2023-11-20 16:53:58 +11:00
meeb
7f4e8586b7 Merge pull request #435 from klinker41/patch-1
Update other-database-backends.md
2023-11-20 16:51:00 +11:00
Jake Klinker
bab4b9b056 Update other-database-backends.md
Add documentation about how to use a docker compose postgres container and connect it to tubesync. This seems like a fairly basic use case that many users would want to implement, given the large performance benefits it brings.
2023-11-19 10:23:07 -07:00
meeb
30c2127271 bump ffmpeg to 2023-11-14 and yt-dlp to 2023.11.16 2023-11-16 18:54:57 +11:00
locke4
d1cb7ef76c Delete tubesync/sync/migrations/0020_auto_20231024_1812.py 2023-10-24 19:26:50 +01:00
locke4
1fd4f87c53 Merge pull request #8 from locke4/fix-pagenums
Ran makemigrations
2023-10-24 19:25:52 +01:00
locke4
cf06f4cbc2 Merge pull request #7 from locke4/locke4-patch-2
Updated according to comments on PR
2023-10-24 18:38:17 +01:00
locke4
0523f481d2 Updated according to comments on PR
Fixed whitespace

Update tests.py

Ran makemigrations

Update models.py

Update tests.py

Update models.py

Update tests.py

Update models.py

Update models.py

Update tests.py

Update models.py

Update tests.py

Update tests.py

Update tests.py

Update models.py

Update models.py

Update tests.py

Update models.py

Update models.py

Update tests.py

Update tests.py

Update signals.py

Update tasks.py

Update signals.py

Update models.py

Update tasks.py

Update signals.py

Update tasks.py

Update models.py
2023-10-24 18:37:09 +01:00
locke4
aa4bd4ec26 Ran makemigrations 2023-10-24 18:17:56 +01:00
locke4
96d9ee93ef Merge pull request #6 from locke4/fix-pagenums
Fix pagenums for "only_skipped" query param
2023-10-22 13:39:11 +01:00
sparklesmcfadden
43cf532903 Adds option to delete files on disk when removing a media item from TubeSync 2023-10-21 20:54:21 -05:00
locke4
8240c49d5c Update ci.yaml 2023-10-22 02:42:57 +01:00
locke4
0c5e3d3818 Update media.html 2023-10-22 02:30:24 +01:00
locke4
22edd1bbda Update pagination.html 2023-10-22 02:25:19 +01:00
locke4
fea0bb191e Fix typo 2023-10-21 21:23:57 +01:00
locke4
0f65a4027a Add support for regex filters on video names
Update views.py
Update tests.py
Update source.html
Update tasks.py
Update signals.py
Update 0001_initial.py
Update models.py
Update models.py
Update tests.py
2023-10-21 21:07:15 +01:00
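
The pull request above adds regex filtering of video titles per source. A hedged sketch of the core check (the parameter names are illustrative, not necessarily the model's real attributes):

    import re

    def title_matches_filter(source_filter_text, media_title):
        # An empty filter accepts everything; otherwise the title must match
        # the source's regular expression for the media to be downloaded.
        if not source_filter_text:
            return True
        return bool(re.search(source_filter_text, media_title))
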
meeb
5cac374486 Merge pull request #420 from sparklesmcfadden/delete-removed-media
Adds workflow to delete local media that no longer exists in the source
2023-10-21 14:31:39 +11:00
meeb
69efc9298d Merge pull request #423 from ltomes/patch-1
Update other-database-backends.md
2023-10-21 14:30:06 +11:00
Levi Tomes
1be8dff769 Update other-database-backends.md
django-admin only ran the loaddata for me with the - before the format flag.
2023-10-20 18:22:40 -05:00
cavanfarrell
350e544594 Fixes formatting 2023-10-20 10:25:20 -05:00
cavanfarrell
0542c734e5 Adds workflow to delete local media that no longer exists in the source 2023-10-20 10:19:57 -05:00
meeb
42b337c408 bump ffmpeg to autobuild-2023-10-11-14-20 2023-10-12 15:50:38 +11:00
meeb
2f82f8c599 fix tests 2023-10-12 15:44:51 +11:00
meeb
b57ca110b0 bump to 0.13.1 2023-10-12 15:34:33 +11:00
meeb
e3e7352600 add uploader variable, resolves #270 2023-10-12 15:33:58 +11:00
meeb
6d3a7bf859 move metadata collection to a higher priority over thumbnails, resolves #418 2023-10-12 15:27:19 +11:00
meeb
25f622311f bump to 0.13.0 2023-09-25 18:47:21 +10:00
meeb
adea4a0ecd bump ffmpeg to autobuild-2023-09-24-14-11 2023-09-25 18:45:45 +10:00
meeb
0d76f2f94e bump s6 to 3.1.5.0 2023-09-25 18:37:29 +10:00
meeb
71578d926e fix tests after subs lang pr 2023-09-25 18:31:32 +10:00
meeb
777cdb5ecc Merge pull request #406 from pacoccino/subtitles
Subtitles
2023-09-05 06:33:26 +10:00
pacoccino
3dd445bf96 Add a validator for sub_lang 2023-09-04 14:58:57 +02:00
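
A minimal sketch of what a validator for a comma-separated sub_lang field could look like in Django; the exact pattern and error message used by the project are not shown here, so treat this as illustrative:

    from django.core.validators import RegexValidator

    # Accept comma-separated language codes such as "en,fr,pt-BR".
    sub_lang_validator = RegexValidator(
        regex=r'^(\w{2,3}(-\w+)?)(,\w{2,3}(-\w+)?)*$',
        message='Enter a comma-separated list of subtitle language codes',
    )
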
pacoccino
86744c0510 Remove extension edits 2023-09-02 14:37:07 +02:00
pacoccino
be7454f72a Add subtitles config into sources model 2023-09-02 14:30:23 +02:00
pacoccino
e9f03cb6bf download subtitles draft 2023-08-31 22:40:29 +02:00
meeb
ddc127e6af bump libs, bump ffmpeg to autobuild-2023-08-12-14-12, resolves #399 2023-08-13 17:43:43 +10:00
meeb
63d32a1e11 replace PIL.Image.ANTIALIAS with PIL.Image.LANCZOS, resolves #392 2023-07-16 00:06:05 +10:00
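
Pillow 10 removed the Image.ANTIALIAS constant; the commit above switches to the equivalent LANCZOS filter. A tiny sketch of the replacement when downscaling a thumbnail:

    from PIL import Image

    def resize_thumbnail(path, width, height):
        # Image.ANTIALIAS was an alias of the Lanczos filter; LANCZOS is the
        # direct replacement after its removal in Pillow 10.
        with Image.open(path) as img:
            return img.resize((width, height), Image.LANCZOS)
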
meeb
2ebbb8480e bump ffmpeg to 2023-07-14-14-08 2023-07-15 14:32:16 +10:00
meeb
21785e031a add requests[socks], resolves #391 2023-07-15 14:23:23 +10:00
meeb
f12e13162f ignore media formats which do not have acodecs or vcodecs in their respective matchers, resolves #386 2023-06-29 23:48:35 +10:00
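
The fix above skips formats whose codec fields are missing when matching audio or video streams. A hedged sketch over yt-dlp style format dictionaries:

    def usable_video_formats(formats):
        # yt-dlp marks a missing codec as the string 'none'; ignore those and
        # any format that omits the field entirely.
        return [f for f in formats if f.get('vcodec') not in (None, '', 'none')]
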
meeb
5c9c1550bf make shell helper 2023-06-29 23:30:47 +10:00
meeb
12638afb60 bump container image base to debian bookworm, update ffmpeg to 2023-06-27 and yt-dlp to 2023-06-22, rework python packages installation after bookworm update 2023-06-28 02:54:09 +10:00
meeb
b9886a3b27 Merge pull request #381 from a-kr/fix_cleanup_old_media
in cleanup_old_media, filter in database rather than in Python
2023-05-27 14:03:35 +10:00
Alexey Kryuchkov
612f78e7eb in cleanup_old_media, filter in database rather than in Python 2023-05-27 01:28:15 +03:00
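
The change above moves the age check in cleanup_old_media from a Python loop into the database query. A sketch of the idea with the Django ORM (the import path and field names are assumptions):

    from datetime import timedelta

    from django.utils import timezone

    from sync.models import Media  # assumed app / model path

    def cleanup_old_media(source, days_to_keep):
        # Push the age comparison into SQL instead of iterating every Media
        # row in Python and comparing dates one by one.
        cutoff = timezone.now() - timedelta(days=days_to_keep)
        old_media = Media.objects.filter(source=source, downloaded=True,
                                         download_date__lt=cutoff)
        for media in old_media:
            media.delete()
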
meeb
0c5a9c53f8 Merge pull request #376 from gautamkrishnar/fix/dockerfile
fixing unavailable ffmpeg version
2023-05-04 12:38:27 +10:00
Gautam krishna R
d439b2f223 fixing unavailable ffmpeg version
fixing unavailable ffmpeg version
2023-05-03 21:33:16 +05:30
meeb
7116617cd2 Merge pull request #374 from garbled1/latest_dl_fix
Fix #364 by checking the filesize is not null.
2023-05-03 02:02:14 +10:00
garbled1
422d228359 Fix #364 by checking the filesize is not null. 2023-05-02 08:24:50 -07:00
meeb
1f68be5c26 update ffmpeg to 2023-04-13-12-52 2023-04-14 12:28:38 +10:00
meeb
089a487f3a add additional library ID help link, resolves #370 2023-04-14 11:45:52 +10:00
meeb
24ae70ea70 add reset-metadata command, related to #287 2023-04-05 11:02:21 +10:00
meeb
72c3242e70 add TUBESYNC_RESET_DOWNLOAD_DIR env var to toggle resetting permissions on /downloads in the container on start, resolves #354 2023-03-26 14:05:47 +11:00
meeb
f3e93c0ecf bump ffmpeg to autobuild-2023-03-23-15-58 2023-03-24 13:17:12 +11:00
meeb
fa8efb178e allow easy container env var override of HEALTHCHECK_ALLOWED_IPS, resolves #168 2023-03-24 13:02:16 +11:00
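
A small sketch of overriding HEALTHCHECK_ALLOWED_IPS from a container environment variable, as the commit above describes; the environment variable name and the comma-separated format are assumptions:

    import os

    # Allow the healthcheck IP allow-list to be overridden from the container
    # environment, falling back to localhost only.
    HEALTHCHECK_ALLOWED_IPS = os.getenv(
        'TUBESYNC_HEALTHCHECK_ALLOWED_IPS', '127.0.0.1').split(',')
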
meeb
2001faea44 Merge pull request #358 from darmiel/fix/font-weight
fix: bold font weight
2023-03-11 15:49:06 +11:00
darmiel
b370e98031 fix: bold font weight 2023-03-10 14:36:29 +01:00
meeb
55bfd911b9 catch typeerrors for duration metadata, resolves #248 2023-03-10 18:23:49 +11:00
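
The fix above guards against malformed duration metadata. A minimal sketch of catching the TypeError (and the ValueError that usually travels with it) when coercing the value:

    def parse_duration(metadata):
        # Duration can be missing or arrive as an unexpected type in the
        # source metadata; treat anything non-numeric as unknown.
        try:
            return int(metadata.get('duration'))
        except (TypeError, ValueError):
            return 0
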
meeb
e47d0eb7be Merge pull request #357 from darmiel/fix/bump-ffpmeg
chore: bump ffmpeg to `109977-gaca7ef78cc`
2023-03-10 05:00:15 +11:00
darmiel
a95c64bc10 chore: bump ffmpeg to 109977-gaca7ef78cc 2023-03-09 14:58:41 +01:00
meeb
e9d4f89f39 fix connection kwarg to db_type() in custom field to be compatible with the postgresql backend, resolves #347 2023-02-21 13:55:03 +11:00
meeb
7876b48860 use backend agnostic text type for custom field, related to #345 and #338 2023-02-20 14:56:28 +11:00
meeb
2639d911ab change sponsorblock_categories to a textfield, fixing max charlen=255 for mysql, related to #338 2023-02-20 13:24:38 +11:00
meeb
e4c0f0e98a Merge pull request #338 from kuhnchris/embed-thumbnail
Configurations in Sources
2023-02-20 11:23:18 +11:00
KuhnChris
601449ce08 migrate migrations; split fields into fields.py 2023-02-19 23:44:48 +01:00
KuhnChris
fe4c876fdc "source" overview, fix some edge case(s) 2023-02-18 14:03:32 +01:00
KuhnChris
fbe9546a74 Merge branch 'meeb-main' into embed-thumbnail 2023-02-18 11:38:59 +01:00
KuhnChris
ce14167cee formating 2023-02-18 11:38:23 +01:00
KuhnChris
c927f32aa6 ffmpeg embed thumbnails, configuration 2023-02-18 11:37:28 +01:00
KuhnChris
1d5579aa31 Phase 1 - extend model for new fields 2023-02-18 11:35:45 +01:00
meeb
d8a9572411 bump ffmpeg, fix container build 2023-02-18 13:14:25 +11:00
meeb
8315efac03 bump to 0.12.1, resolves #340 and #341 2023-02-18 12:59:57 +11:00
meeb
35678e3be9 temporarily disable sponsorblock by default pending #338 2023-02-18 12:54:39 +11:00
meeb
e75b446883 Merge pull request #334 from kuhnchris/skip-manual
Adding new "manual_skip" field; adapt UI
2023-02-18 12:18:00 +11:00
meeb
dd05595558 Merge pull request #337 from kuhnchris/align-checkboxtext
align (i) better with text (+ checkbox less wide)
2023-02-15 11:56:56 +11:00
KuhnChris
2772e85d9f ffmpeg embed thumbnails, configuration 2023-02-15 00:01:44 +01:00
KuhnChris
931aa78815 align (i) better with text (+ checkbox less wide) 2023-02-14 22:06:15 +01:00
KuhnChris
24a49d2f14 Phase 1 - extend model for new fields 2023-02-14 21:52:50 +01:00
meeb
f14d2dd29e Merge pull request #335 from kuhnchris/delete_index_source
del `delete_index_source_task` calls - solves #333
2023-02-14 18:55:57 +11:00
KuhnChris
f4e5b6e76c del delete_index_source_task calls - solves #333 2023-02-13 14:23:41 +01:00
KuhnChris
977f996d8e Adding new "manual_skip" field; adapt UI 2023-02-13 07:46:16 +01:00
meeb
dc5491455c Merge pull request #331 from kuhnchris/sync-now-v2
"Sync now" button
2023-02-13 11:17:46 +11:00
meeb
70ef11d552 Merge pull request #332 from kuhnchris/patch-1
Remove automatic builds on pull_request
2023-02-13 11:16:11 +11:00
KuhnChris
b04e237cb8 Remove automatic builds on pull_request
Removes the automatic pull request build due to failing for the regular contributors (aside from @meeb).
2023-02-12 19:14:41 +01:00
KuhnChris
55c58b4836 "Sync now" button 2023-02-12 19:03:28 +01:00
meeb
e871983707 Merge pull request #325 from kuhnchris/allow-audio-play
Fix bug getting content_type for "audio only"
2023-02-12 23:51:37 +11:00
meeb
b3f93ddef7 Merge pull request #327 from kuhnchris/dev-file-plrovider
(dev only) allow file download directly from django
2023-02-12 23:46:24 +11:00
KuhnChris
bf7a0fcec0 content_type based on vcodec/acodec 2023-02-12 13:22:57 +01:00
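
The commits above derive the response content type from the stream codecs so audio-only items play in the browser. A hedged sketch of that decision; the returned MIME types are illustrative, not necessarily the ones the project maps to:

    def guess_content_type(media_format):
        # Audio-only downloads should be served with an audio/* content type
        # so the in-page <audio> player works instead of a broken <video> tag.
        vcodec = media_format.get('vcodec', 'none')
        if vcodec == 'none':
            return 'audio/mp4'
        return 'video/mp4'
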
KuhnChris
598ee2bd0a use pathlib; .exists() check 2023-02-12 13:09:33 +01:00
meeb
7b12fe3fad Merge pull request #326 from kuhnchris/audio-player-main
<audio>-player instead of <video> for audio only
2023-02-12 12:02:01 +11:00
meeb
7358b52184 Merge pull request #324 from kuhnchris/suspicious-file-op
Add check against folders outside of DOWNLOAD_ROOT
2023-02-12 11:54:57 +11:00
meeb
4b4b4eb58d Merge pull request #323 from kuhnchris/update-readme
Update README-FAQ regarding `Locking Failed`
2023-02-12 11:53:38 +11:00
KuhnChris
b719fd5122 Allow file fetching directly from django (dev-env) 2023-02-11 22:07:48 +01:00
KuhnChris
4696aebebc allow the use of <audio> if only audio available. 2023-02-11 22:05:36 +01:00
KuhnChris
7d333487fe Fix bug getting content_type for "audio only" 2023-02-11 22:01:01 +01:00
KuhnChris
844d17006e Add check against folders outside of DOWNLOAD_ROOT 2023-02-11 20:42:59 +01:00
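
The check above rejects paths that resolve outside DOWNLOAD_ROOT. A sketch with pathlib; SuspiciousFileOperation is Django's exception, while the helper name is illustrative:

    from pathlib import Path

    from django.conf import settings
    from django.core.exceptions import SuspiciousFileOperation

    def ensure_inside_download_root(filepath):
        # Resolve symlinks and '..' segments, then require the result to sit
        # below DOWNLOAD_ROOT before touching it.
        root = Path(settings.DOWNLOAD_ROOT).resolve()
        target = Path(filepath).resolve()
        try:
            target.relative_to(root)
        except ValueError:
            raise SuspiciousFileOperation(f'{filepath} is outside the download root')
        return target
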
KuhnChris
f9a27eb33e Update README-FAQ regarding Locking Failed 2023-02-11 19:16:25 +01:00
meeb
b8434ff444 Merge pull request #320 from ticoombs/main
feat: initial sponsorblock support
2023-02-03 17:50:48 +11:00
Tim Coombs
932eb4caf4 feat: sponsorblock support 2023-02-03 15:18:52 +11:00
meeb
812fbc5f46 remove per-init timeout 2023-01-20 13:55:54 +11:00
meeb
fdc591cc7c fix in-client video player with correct content type headers 2023-01-19 13:29:37 +11:00
meeb
4ae454a4f3 disable s6 service timeout entirely, resolves #309 2023-01-19 12:57:09 +11:00
meeb
4f6af702ae set S6_CMD_WAIT_FOR_SERVICES_MAXTIME to 60s 2023-01-19 11:38:01 +11:00
meeb
2431f8775a bump to 0.12.0 2023-01-19 05:17:50 +11:00
meeb
438316953a increase tubesync-init up timeout to 60s 2023-01-19 04:11:10 +11:00
meeb
85637fecba fix ci build 2023-01-18 18:45:34 +11:00
meeb
f9dfffe91a Merge pull request #305 from biolds/misc
Misc fixes
2023-01-18 18:41:01 +11:00
meeb
0845a6662d switch to s6 v3, bump s6 to 3.1.2.1, bump ffmpeg to 2023-01-03-12-55, bump yt-dlp to 2023.01.06, fix multi-arch builds 2023-01-18 18:39:23 +11:00
Laurent DEFERT
419c4c5a9f source edition refactoring 2023-01-17 21:34:41 +01:00
Laurent DEFERT
2f475bf2a8 limit the number of videos to process 2023-01-15 18:37:00 +01:00
Laurent DEFERT
7d16a1714c add missing migration 2023-01-15 18:37:00 +01:00
Laurent DEFERT
a7100a0f53 prevent exceptions when metadata loading failed 2023-01-15 18:37:00 +01:00
Laurent DEFERT
5a4e6cee58 typo fix 2023-01-08 11:41:30 +01:00
Laurent DEFERT
e69adafcec fix deleting media files 2022-12-28 18:38:38 +01:00
meeb
f9908a4d3b Merge pull request #297 from biolds/embed-video
Embedded video player
2022-12-28 22:41:08 +11:00
Laurent DEFERT
bf99241ad2 embedded video player, video downloads 2022-12-28 12:03:40 +01:00
Laurent DEFERT
0e278bc8c4 fix relative media_file path
FileField should store a relative path, to make their "url" attribute work
2022-12-28 12:03:34 +01:00
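
The fix above stores media_file as a path relative to the storage root so FileField's url attribute resolves correctly. A sketch of deriving that relative path (MEDIA_ROOT here stands in for whatever root setting the project actually uses):

    from pathlib import Path

    from django.conf import settings

    def relative_media_path(absolute_path):
        # FileField values should be relative to the storage root, otherwise
        # the generated .url attribute points at a nonsense location.
        return str(Path(absolute_path).relative_to(settings.MEDIA_ROOT))
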
meeb
57921ca6b9 Merge pull request #281 from PaulWoitaschek/patch-1
Specify the full compose syntax
2022-11-02 07:57:52 +11:00
Paul Woitaschek
fb23fdeae1 Specify the full compose syntax 2022-11-01 18:02:29 +01:00
meeb
433a7792d5 Merge pull request #275 from serjs/ffmpeg
Change ffmpeg from ytdlp
2022-10-14 21:27:27 +11:00
Sergey Bogatyrets
e198cc011b Change ffmpeg from ytdlp 2022-10-13 13:25:06 +03:00
meeb
296a790af5 bump libs 2022-09-29 03:19:12 +10:00
meeb
e190821b7b bump libs 2022-09-24 16:56:41 +10:00
meeb
1ba865cf0d run build before test so static file tests work without precondition 2022-09-04 11:58:13 +10:00
meeb
05d50c958e add support for m.youtube.com as a netloc when validating source urls, resolves #264 2022-09-04 11:57:15 +10:00
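
The commit above accepts m.youtube.com alongside the desktop hostnames when validating source URLs. A hedged sketch of that check with urllib (the allowed set is illustrative):

    from urllib.parse import urlsplit

    ALLOWED_NETLOCS = {'www.youtube.com', 'youtube.com', 'm.youtube.com'}

    def is_valid_source_url(url):
        # Mobile links use the m.youtube.com host, so treat it the same as the
        # desktop hostnames when validating a new source.
        return urlsplit(url).netloc.lower() in ALLOWED_NETLOCS
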
meeb
8426c7309a bump libs 2022-09-04 11:46:04 +10:00
meeb
0450d47d81 bump libs 2022-08-25 17:01:39 +10:00
meeb
e8d899d273 use container config base dir for cookies when using container local settings, resolves #259 2022-08-14 15:43:49 +10:00
meeb
25d5768f6e bump libs 2022-08-14 15:43:06 +10:00
meeb
e9a3f2dd59 url prefix override typo in some environments, related to #255 2022-07-25 16:37:47 +10:00
meeb
7832282545 patch the wsgi application environ to support sub-URLs, add a master ENV var to set a sub-URL, tweak SASS and README to match, actually resolves #255 2022-07-25 13:36:12 +10:00
meeb
d161aef112 allow Django STATIC_URL to be set, resolves #255 2022-07-24 17:51:46 +10:00
meeb
8901aea8d7 Merge branch 'main' of github.com:meeb/tubesync 2022-07-24 17:38:54 +10:00
meeb
227cae4cdb Merge pull request #256 from rstrom1763/main
Correcting various spelling and grammar errors
2022-07-23 17:40:46 +10:00
Ryan
5e57abe86a Correcting various spelling and grammar errors
Corrected various spelling and grammar errors on the README.md file. Utilized a spell checker to verify.
2022-07-23 00:22:05 -05:00
meeb
c04c1b3cfb bump libs 2022-07-20 17:45:10 +10:00
meeb
a94541a354 replace all whitespaec with spaces in filenames, related to #35 2022-07-17 13:45:40 +10:00
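
A one-liner sketch of the filename clean-up described above, collapsing any whitespace character (tabs, newlines, non-breaking spaces) into a plain space:

    import re

    def normalise_filename(name):
        # Replace every whitespace character with a regular space so generated
        # filenames never contain tabs or newlines.
        return re.sub(r'\s', ' ', name)
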
meeb
84a368aa09 bump libs 2022-07-17 13:35:00 +10:00
meeb
6d2fb86e7d bump libs 2022-07-06 11:27:52 +10:00
83 changed files with 8160 additions and 997 deletions

ci.yaml

@@ -4,12 +4,10 @@ env:
IMAGE_NAME: tubesync
on:
workflow_dispatch:
push:
branches:
- main
pull_request:
branches:
- main
jobs:
test:
@@ -27,7 +25,7 @@ jobs:
run: |
python -m pip install --upgrade pip
pip install pipenv
pipenv install --system
pipenv install --system --skip-lock
- name: Set up Django environment
run: cp tubesync/tubesync/local_settings.py.example tubesync/tubesync/local_settings.py
- name: Run Django tests

.gitignore

@@ -1,3 +1,4 @@
.DS_Store
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
@@ -130,3 +131,6 @@ dmypy.json
# Pyre type checker
.pyre/
Pipfile.lock
.vscode/launch.json

Dockerfile

@@ -1,28 +1,41 @@
FROM debian:bullseye-slim
FROM debian:bookworm-slim
ARG TARGETPLATFORM
ARG S6_VERSION="2.2.0.3"
ARG S6_VERSION="3.1.5.0"
ARG FFMPEG_DATE="autobuild-2023-11-29-14-19"
ARG FFMPEG_VERSION="112875-g47e214245b"
ENV DEBIAN_FRONTEND="noninteractive" \
HOME="/root" \
LANGUAGE="en_US.UTF-8" \
LANG="en_US.UTF-8" \
LC_ALL="en_US.UTF-8" \
TERM="xterm"
TERM="xterm" \
S6_CMD_WAIT_FOR_SERVICES_MAXTIME="0"
# Install third party software
RUN export ARCH=$(case ${TARGETPLATFORM:-linux/amd64} in \
"linux/amd64") echo "amd64" ;; \
"linux/arm64") echo "aarch64" ;; \
*) echo "" ;; esac) && \
export S6_EXPECTED_SHA256=$(case ${TARGETPLATFORM:-linux/amd64} in \
"linux/amd64") echo "a7076cf205b331e9f8479bbb09d9df77dbb5cd8f7d12e9b74920902e0c16dd98" ;; \
"linux/arm64") echo "84f585a100b610124bb80e441ef2dc2d68ac2c345fd393d75a6293e0951ccfc5" ;; \
export S6_ARCH_EXPECTED_SHA256=$(case ${TARGETPLATFORM:-linux/amd64} in \
"linux/amd64") echo "65d0d0f353d2ff9d0af202b268b4bf53a9948a5007650854855c729289085739" ;; \
"linux/arm64") echo "3fbd14201473710a592b2189e81f00f3c8998e96d34f16bd2429c35d1bc36d00" ;; \
*) echo "" ;; esac) && \
export S6_DOWNLOAD=$(case ${TARGETPLATFORM:-linux/amd64} in \
"linux/amd64") echo "https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-amd64.tar.gz" ;; \
"linux/arm64") echo "https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-aarch64.tar.gz" ;; \
export S6_DOWNLOAD_ARCH=$(case ${TARGETPLATFORM:-linux/amd64} in \
"linux/amd64") echo "https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-x86_64.tar.xz" ;; \
"linux/arm64") echo "https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-aarch64.tar.xz" ;; \
*) echo "" ;; esac) && \
export FFMPEG_EXPECTED_SHA256=$(case ${TARGETPLATFORM:-linux/amd64} in \
"linux/amd64") echo "36bac8c527bf390603416f749ab0dd860142b0a66f0865b67366062a9c286c8b" ;; \
"linux/arm64") echo "8f36e45d99d2367a5c0c220ee3164fa48f4f0cec35f78204ccced8dc303bfbdc" ;; \
*) echo "" ;; esac) && \
export FFMPEG_DOWNLOAD=$(case ${TARGETPLATFORM:-linux/amd64} in \
"linux/amd64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/${FFMPEG_DATE}/ffmpeg-N-${FFMPEG_VERSION}-linux64-gpl.tar.xz" ;; \
"linux/arm64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/${FFMPEG_DATE}/ffmpeg-N-${FFMPEG_VERSION}-linuxarm64-gpl.tar.xz" ;; \
*) echo "" ;; esac) && \
export S6_NOARCH_EXPECTED_SHA256="fd80c231e8ae1a0667b7ae2078b9ad0e1269c4d117bf447a4506815a700dbff3" && \
export S6_DOWNLOAD_NOARCH="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-noarch.tar.xz" && \
echo "Building for arch: ${ARCH}|${ARCH44}, downloading S6 from: ${S6_DOWNLOAD}}, expecting S6 SHA256: ${S6_EXPECTED_SHA256}" && \
set -x && \
apt-get update && \
@@ -30,15 +43,25 @@ RUN export ARCH=$(case ${TARGETPLATFORM:-linux/amd64} in \
echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && \
locale-gen en_US.UTF-8 && \
# Install required distro packages
apt-get -y --no-install-recommends install curl ca-certificates binutils && \
apt-get -y --no-install-recommends install curl ca-certificates binutils xz-utils && \
# Install s6
curl -L ${S6_DOWNLOAD} --output /tmp/s6-overlay-${ARCH}.tar.gz && \
sha256sum /tmp/s6-overlay-${ARCH}.tar.gz && \
echo "${S6_EXPECTED_SHA256} /tmp/s6-overlay-${ARCH}.tar.gz" | sha256sum -c - && \
tar xzf /tmp/s6-overlay-${ARCH}.tar.gz -C / && \
curl -L ${S6_DOWNLOAD_NOARCH} --output /tmp/s6-overlay-noarch.tar.xz && \
echo "${S6_NOARCH_EXPECTED_SHA256} /tmp/s6-overlay-noarch.tar.xz" | sha256sum -c - && \
tar -C / -Jxpf /tmp/s6-overlay-noarch.tar.xz && \
curl -L ${S6_DOWNLOAD_ARCH} --output /tmp/s6-overlay-${ARCH}.tar.xz && \
echo "${S6_ARCH_EXPECTED_SHA256} /tmp/s6-overlay-${ARCH}.tar.xz" | sha256sum -c - && \
tar -C / -Jxpf /tmp/s6-overlay-${ARCH}.tar.xz && \
# Install ffmpeg
echo "Building for arch: ${ARCH}|${ARCH44}, downloading FFMPEG from: ${FFMPEG_DOWNLOAD}, expecting FFMPEG SHA256: ${FFMPEG_EXPECTED_SHA256}" && \
curl -L ${FFMPEG_DOWNLOAD} --output /tmp/ffmpeg-${ARCH}.tar.xz && \
sha256sum /tmp/ffmpeg-${ARCH}.tar.xz && \
echo "${FFMPEG_EXPECTED_SHA256} /tmp/ffmpeg-${ARCH}.tar.xz" | sha256sum -c - && \
tar -xf /tmp/ffmpeg-${ARCH}.tar.xz --strip-components=2 --no-anchored -C /usr/local/bin/ "ffmpeg" && \
tar -xf /tmp/ffmpeg-${ARCH}.tar.xz --strip-components=2 --no-anchored -C /usr/local/bin/ "ffprobe" && \
# Clean up
rm -rf /tmp/s6-overlay-${ARCH}.tar.gz && \
apt-get -y autoremove --purge curl binutils
rm -rf /tmp/ffmpeg-${ARCH}.tar.xz && \
apt-get -y autoremove --purge curl binutils xz-utils
# Copy app
COPY tubesync /app
@@ -49,7 +72,6 @@ COPY pip.conf /etc/pip.conf
# Add Pipfile
COPY Pipfile /app/Pipfile
COPY Pipfile.lock /app/Pipfile.lock
# Switch workdir to the the app
WORKDIR /app
@@ -61,31 +83,30 @@ RUN set -x && \
apt-get -y install nginx-light && \
apt-get -y --no-install-recommends install \
python3 \
python3-setuptools \
python3-pip \
python3-dev \
python3-pip \
python3-wheel \
pipenv \
gcc \
g++ \
make \
pkgconf \
default-libmysqlclient-dev \
libmariadb3 \
postgresql-common \
libpq-dev \
libpq5 \
libjpeg62-turbo \
libwebp6 \
libwebp7 \
libjpeg-dev \
zlib1g-dev \
libwebp-dev \
ffmpeg \
redis-server && \
# Install pipenv
pip3 --disable-pip-version-check install wheel pipenv && \
# Create a 'app' user which the application will run as
groupadd app && \
useradd -M -d /app -s /bin/false -g app app && \
# Install non-distro packages
pipenv install --system && \
PIPENV_VERBOSITY=64 pipenv install --system --skip-lock && \
# Make absolutely sure we didn't accidentally bundle a SQLite dev database
rm -rf /app/db.sqlite3 && \
# Run any required app commands
@@ -98,9 +119,7 @@ RUN set -x && \
mkdir -p /downloads/video && \
# Clean up
rm /app/Pipfile && \
rm /app/Pipfile.lock && \
pipenv --clear && \
pip3 --disable-pip-version-check uninstall -y pipenv wheel virtualenv && \
apt-get -y autoremove --purge \
python3-pip \
python3-dev \
@@ -122,11 +141,11 @@ RUN set -x && \
rm -rf /root && \
mkdir -p /root && \
chown root:root /root && \
chmod 0700 /root
chmod 0755 /root
# Append software versions
RUN set -x && \
FFMPEG_VERSION=$(/usr/bin/ffmpeg -version | head -n 1 | awk '{ print $3 }') && \
FFMPEG_VERSION=$(/usr/local/bin/ffmpeg -version | head -n 1 | awk '{ print $3 }') && \
echo "ffmpeg_version = '${FFMPEG_VERSION}'" >> /app/common/third_party_versions.py
# Copy root

Makefile

@@ -29,5 +29,13 @@ runcontainer:
$(docker) run --rm --name $(name) --env-file dev.env --log-opt max-size=50m -ti -p 4848:4848 $(image)
test:
stopcontainer:
$(docker) stop $(name)
test: build
cd tubesync && $(python) manage.py test --verbosity=2 && cd ..
shell:
cd tubesync && $(python) manage.py shell

Pipfile

@@ -4,6 +4,7 @@ url = "https://pypi.org/simple"
verify_ssl = true
[dev-packages]
autopep8 = "*"
[packages]
django = "~=3.2"
@@ -15,10 +16,10 @@ gunicorn = "*"
django-compressor = "*"
httptools = "*"
django-background-tasks = "*"
requests = "*"
django-basicauth = "*"
psycopg2-binary = "*"
mysqlclient = "*"
yt-dlp = "*"
redis = "*"
hiredis = "*"
requests = {extras = ["socks"], version = "*"}

Pipfile.lock

@@ -1,723 +0,0 @@
{
"_meta": {
"hash": {
"sha256": "a8b6cd12970bce4ea2de47aed437cf99ab5e63253a53e587e885c63b32ebc9a1"
},
"pipfile-spec": 6,
"requires": {},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"asgiref": {
"hashes": [
"sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4",
"sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"
],
"markers": "python_version >= '3.7'",
"version": "==3.5.2"
},
"async-timeout": {
"hashes": [
"sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15",
"sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"
],
"markers": "python_version >= '3.6'",
"version": "==4.0.2"
},
"brotli": {
"hashes": [
"sha256:12effe280b8ebfd389022aa65114e30407540ccb89b177d3fbc9a4f177c4bd5d",
"sha256:160c78292e98d21e73a4cc7f76a234390e516afcd982fa17e1422f7c6a9ce9c8",
"sha256:16d528a45c2e1909c2798f27f7bf0a3feec1dc9e50948e738b961618e38b6a7b",
"sha256:19598ecddd8a212aedb1ffa15763dd52a388518c4550e615aed88dc3753c0f0c",
"sha256:1c48472a6ba3b113452355b9af0a60da5c2ae60477f8feda8346f8fd48e3e87c",
"sha256:268fe94547ba25b58ebc724680609c8ee3e5a843202e9a381f6f9c5e8bdb5c70",
"sha256:269a5743a393c65db46a7bb982644c67ecba4b8d91b392403ad8a861ba6f495f",
"sha256:26d168aac4aaec9a4394221240e8a5436b5634adc3cd1cdf637f6645cecbf181",
"sha256:29d1d350178e5225397e28ea1b7aca3648fcbab546d20e7475805437bfb0a130",
"sha256:2aad0e0baa04517741c9bb5b07586c642302e5fb3e75319cb62087bd0995ab19",
"sha256:3496fc835370da351d37cada4cf744039616a6db7d13c430035e901443a34daa",
"sha256:35a3edbe18e876e596553c4007a087f8bcfd538f19bc116917b3c7522fca0429",
"sha256:3b78a24b5fd13c03ee2b7b86290ed20efdc95da75a3557cc06811764d5ad1126",
"sha256:40d15c79f42e0a2c72892bf407979febd9cf91f36f495ffb333d1d04cebb34e4",
"sha256:44bb8ff420c1d19d91d79d8c3574b8954288bdff0273bf788954064d260d7ab0",
"sha256:4688c1e42968ba52e57d8670ad2306fe92e0169c6f3af0089be75bbac0c64a3b",
"sha256:495ba7e49c2db22b046a53b469bbecea802efce200dffb69b93dd47397edc9b6",
"sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438",
"sha256:503fa6af7da9f4b5780bb7e4cbe0c639b010f12be85d02c99452825dd0feef3f",
"sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389",
"sha256:5913a1177fc36e30fcf6dc868ce23b0453952c78c04c266d3149b3d39e1410d6",
"sha256:5b6ef7d9f9c38292df3690fe3e302b5b530999fa90014853dcd0d6902fb59f26",
"sha256:5cb1e18167792d7d21e21365d7650b72d5081ed476123ff7b8cac7f45189c0c7",
"sha256:61a7ee1f13ab913897dac7da44a73c6d44d48a4adff42a5701e3239791c96e14",
"sha256:622a231b08899c864eb87e85f81c75e7b9ce05b001e59bbfbf43d4a71f5f32b2",
"sha256:68715970f16b6e92c574c30747c95cf8cf62804569647386ff032195dc89a430",
"sha256:6b2ae9f5f67f89aade1fab0f7fd8f2832501311c363a21579d02defa844d9296",
"sha256:6c772d6c0a79ac0f414a9f8947cc407e119b8598de7621f39cacadae3cf57d12",
"sha256:6d847b14f7ea89f6ad3c9e3901d1bc4835f6b390a9c71df999b0162d9bb1e20f",
"sha256:76ffebb907bec09ff511bb3acc077695e2c32bc2142819491579a695f77ffd4d",
"sha256:7bbff90b63328013e1e8cb50650ae0b9bac54ffb4be6104378490193cd60f85a",
"sha256:7cb81373984cc0e4682f31bc3d6be9026006d96eecd07ea49aafb06897746452",
"sha256:7ee83d3e3a024a9618e5be64648d6d11c37047ac48adff25f12fa4226cf23d1c",
"sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761",
"sha256:85f7912459c67eaab2fb854ed2bc1cc25772b300545fe7ed2dc03954da638649",
"sha256:87fdccbb6bb589095f413b1e05734ba492c962b4a45a13ff3408fa44ffe6479b",
"sha256:88c63a1b55f352b02c6ffd24b15ead9fc0e8bf781dbe070213039324922a2eea",
"sha256:8a674ac10e0a87b683f4fa2b6fa41090edfd686a6524bd8dedbd6138b309175c",
"sha256:93130612b837103e15ac3f9cbacb4613f9e348b58b3aad53721d92e57f96d46a",
"sha256:9744a863b489c79a73aba014df554b0e7a0fc44ef3f8a0ef2a52919c7d155031",
"sha256:9749a124280a0ada4187a6cfd1ffd35c350fb3af79c706589d98e088c5044267",
"sha256:97f715cf371b16ac88b8c19da00029804e20e25f30d80203417255d239f228b5",
"sha256:9bf919756d25e4114ace16a8ce91eb340eb57a08e2c6950c3cebcbe3dff2a5e7",
"sha256:9d12cf2851759b8de8ca5fde36a59c08210a97ffca0eb94c532ce7b17c6a3d1d",
"sha256:9ed4c92a0665002ff8ea852353aeb60d9141eb04109e88928026d3c8a9e5433c",
"sha256:a72661af47119a80d82fa583b554095308d6a4c356b2a554fdc2799bc19f2a43",
"sha256:afde17ae04d90fbe53afb628f7f2d4ca022797aa093e809de5c3cf276f61bbfa",
"sha256:b336c5e9cf03c7be40c47b5fd694c43c9f1358a80ba384a21969e0b4e66a9b17",
"sha256:b663f1e02de5d0573610756398e44c130add0eb9a3fc912a09665332942a2efb",
"sha256:b83bb06a0192cccf1eb8d0a28672a1b79c74c3a8a5f2619625aeb6f28b3a82bb",
"sha256:c2415d9d082152460f2bd4e382a1e85aed233abc92db5a3880da2257dc7daf7b",
"sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4",
"sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3",
"sha256:db844eb158a87ccab83e868a762ea8024ae27337fc7ddcbfcddd157f841fdfe7",
"sha256:defed7ea5f218a9f2336301e6fd379f55c655bea65ba2476346340a0ce6f74a1",
"sha256:e16eb9541f3dd1a3e92b89005e37b1257b157b7256df0e36bd7b33b50be73bcb",
"sha256:e23281b9a08ec338469268f98f194658abfb13658ee98e2b7f85ee9dd06caa91",
"sha256:e2d9e1cbc1b25e22000328702b014227737756f4b5bf5c485ac1d8091ada078b",
"sha256:e48f4234f2469ed012a98f4b7874e7f7e173c167bed4934912a29e03167cf6b1",
"sha256:e4c4e92c14a57c9bd4cb4be678c25369bf7a092d55fd0866f759e425b9660806",
"sha256:ec1947eabbaf8e0531e8e899fc1d9876c179fc518989461f5d24e2223395a9e3",
"sha256:f909bbbc433048b499cb9db9e713b5d8d949e8c109a2a548502fb9aa8630f0b1"
],
"markers": "platform_python_implementation == 'CPython'",
"version": "==1.0.9"
},
"certifi": {
"hashes": [
"sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d",
"sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"
],
"markers": "python_version >= '3.6'",
"version": "==2022.6.15"
},
"charset-normalizer": {
"hashes": [
"sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597",
"sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"
],
"markers": "python_full_version >= '3.5.0'",
"version": "==2.0.12"
},
"deprecated": {
"hashes": [
"sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d",
"sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.2.13"
},
"django": {
"hashes": [
"sha256:6d93497a0a9bf6ba0e0b1a29cccdc40efbfc76297255b1309b3a884a688ec4b6",
"sha256:b896ca61edc079eb6bbaa15cf6071eb69d6aac08cce5211583cfb41515644fdf"
],
"index": "pypi",
"version": "==3.2.13"
},
"django-appconf": {
"hashes": [
"sha256:ae9f864ee1958c815a965ed63b3fba4874eec13de10236ba063a788f9a17389d",
"sha256:be3db0be6c81fa84742000b89a81c016d70ae66a7ccb620cdef592b1f1a6aaa4"
],
"markers": "python_version >= '3.6'",
"version": "==1.0.5"
},
"django-background-tasks": {
"hashes": [
"sha256:e1b19e8d495a276c9d64c5a1ff8b41132f75d2f58e45be71b78650dad59af9de"
],
"index": "pypi",
"version": "==1.2.5"
},
"django-basicauth": {
"hashes": [
"sha256:15e9e366f698f53c71b1e794dafea060f990a2ac556bae6b7330dd25324a091c",
"sha256:e5e47d1acdc1943bedcc1bf673059d6c15e257dfe9eef67a22fb824f79546c0d"
],
"index": "pypi",
"version": "==0.5.3"
},
"django-compat": {
"hashes": [
"sha256:3ac9a3bedc56b9365d9eb241bc5157d0c193769bf995f9a78dc1bc24e7c2331b"
],
"version": "==1.0.15"
},
"django-compressor": {
"hashes": [
"sha256:1db91b6d04293636a68bd1328dc7bb90d636b0295f67b1cc6d4fa102b9fd25f6",
"sha256:b4fe15cc23bf39420b37cb0030572bd0971104ca1ec3764f502c0f179e576dff"
],
"index": "pypi",
"version": "==4.0"
},
"django-sass-processor": {
"hashes": [
"sha256:7631421e1bd318f8aed4b0e1d962228656cf685228120bcbb964d517cb8e9536",
"sha256:a5aeca9a1ec0a2dafb0dfbf3ec1a746861d2c2146e0171de178f4c1d7c0b472e"
],
"index": "pypi",
"version": "==1.2"
},
"gunicorn": {
"hashes": [
"sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e",
"sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"
],
"index": "pypi",
"version": "==20.1.0"
},
"hiredis": {
"hashes": [
"sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e",
"sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27",
"sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163",
"sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc",
"sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26",
"sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e",
"sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579",
"sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a",
"sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048",
"sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87",
"sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63",
"sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54",
"sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05",
"sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb",
"sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea",
"sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5",
"sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e",
"sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc",
"sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99",
"sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a",
"sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581",
"sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426",
"sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db",
"sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a",
"sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a",
"sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d",
"sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443",
"sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79",
"sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d",
"sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9",
"sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d",
"sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485",
"sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5",
"sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048",
"sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0",
"sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6",
"sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41",
"sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298",
"sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce",
"sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0",
"sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a"
],
"index": "pypi",
"version": "==2.0.0"
},
"httptools": {
"hashes": [
"sha256:1a99346ebcb801b213c591540837340bdf6fd060a8687518d01c607d338b7424",
"sha256:1ee0b459257e222b878a6c09ccf233957d3a4dcb883b0847640af98d2d9aac23",
"sha256:20a45bcf22452a10fa8d58b7dbdb474381f6946bf5b8933e3662d572bc61bae4",
"sha256:29bf97a5c532da9c7a04de2c7a9c31d1d54f3abd65a464119b680206bbbb1055",
"sha256:2c9a930c378b3d15d6b695fb95ebcff81a7395b4f9775c4f10a076beb0b2c1ff",
"sha256:2db44a0b294d317199e9f80123e72c6b005c55b625b57fae36de68670090fa48",
"sha256:3194f6d6443befa8d4db16c1946b2fc428a3ceb8ab32eb6f09a59f86104dc1a0",
"sha256:34d2903dd2a3dd85d33705b6fde40bf91fc44411661283763fd0746723963c83",
"sha256:48e48530d9b995a84d1d89ae6b3ec4e59ea7d494b150ac3bbc5e2ac4acce92cd",
"sha256:54bbd295f031b866b9799dd39cb45deee81aca036c9bff9f58ca06726f6494f1",
"sha256:5d1fe6b6661022fd6cac541f54a4237496b246e6f1c0a6b41998ee08a1135afe",
"sha256:645373c070080e632480a3d251d892cb795be3d3a15f86975d0f1aca56fd230d",
"sha256:6a1a7dfc1f9c78a833e2c4904757a0f47ce25d08634dd2a52af394eefe5f9777",
"sha256:701e66b59dd21a32a274771238025d58db7e2b6ecebbab64ceff51b8e31527ae",
"sha256:72aa3fbe636b16d22e04b5a9d24711b043495e0ecfe58080addf23a1a37f3409",
"sha256:7af6bdbd21a2a25d6784f6d67f44f5df33ef39b6159543b9f9064d365c01f919",
"sha256:7ee9f226acab9085037582c059d66769862706e8e8cd2340470ceb8b3850873d",
"sha256:7f7bfb74718f52d5ed47d608d507bf66d3bc01d4a8b3e6dd7134daaae129357b",
"sha256:8e2eb957787cbb614a0f006bfc5798ff1d90ac7c4dd24854c84edbdc8c02369e",
"sha256:903f739c9fb78dab8970b0f3ea51f21955b24b45afa77b22ff0e172fc11ef111",
"sha256:98993805f1e3cdb53de4eed02b55dcc953cdf017ba7bbb2fd89226c086a6d855",
"sha256:9967d9758df505975913304c434cb9ab21e2c609ad859eb921f2f615a038c8de",
"sha256:a113789e53ac1fa26edf99856a61e4c493868e125ae0dd6354cf518948fbbd5c",
"sha256:a522d12e2ddbc2e91842ffb454a1aeb0d47607972c7d8fc88bd0838d97fb8a2a",
"sha256:abe829275cdd4174b4c4e65ad718715d449e308d59793bf3a931ee1bf7e7b86c",
"sha256:c286985b5e194ca0ebb2908d71464b9be8f17cc66d6d3e330e8d5407248f56ad",
"sha256:cd1295f52971097f757edfbfce827b6dbbfb0f7a74901ee7d4933dff5ad4c9af",
"sha256:ceafd5e960b39c7e0d160a1936b68eb87c5e79b3979d66e774f0c77d4d8faaed",
"sha256:d1f27bb0f75bef722d6e22dc609612bfa2f994541621cd2163f8c943b6463dfe",
"sha256:d3a4e165ca6204f34856b765d515d558dc84f1352033b8721e8d06c3e44930c3",
"sha256:d9b90bf58f3ba04e60321a23a8723a1ff2a9377502535e70495e5ada8e6e6722",
"sha256:f72b5d24d6730035128b238decdc4c0f2104b7056a7ca55cf047c106842ec890",
"sha256:fcddfe70553be717d9745990dfdb194e22ee0f60eb8f48c0794e7bfeda30d2d5",
"sha256:fdb9f9ed79bc6f46b021b3319184699ba1a22410a82204e6e89c774530069683"
],
"index": "pypi",
"version": "==0.4.0"
},
"idna": {
"hashes": [
"sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff",
"sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"
],
"markers": "python_full_version >= '3.5.0'",
"version": "==3.3"
},
"libsass": {
"hashes": [
"sha256:06c8776417fe930714bdc930a3d7e795ae3d72be6ac883ff72a1b8f7c49e5ffb",
"sha256:12f39712de38689a8b785b7db41d3ba2ea1d46f9379d81ea4595802d91fa6529",
"sha256:1e25dd9047a9392d3c59a0b869e0404f2b325a03871ee45285ee33b3664f5613",
"sha256:659ae41af8708681fa3ec73f47b9735a6725e71c3b66ff570bfce78952f2314e",
"sha256:6b984510ed94993708c0d697b4fef2d118929bbfffc3b90037be0f5ccadf55e7",
"sha256:a005f298f64624f313a3ac618ab03f844c71d84ae4f4a4aec4b68d2a4ffe75eb",
"sha256:abc29357ee540849faf1383e1746d40d69ed5cb6d4c346df276b258f5aa8977a",
"sha256:c9ec490609752c1d81ff6290da33485aa7cb6d7365ac665b74464c1b7d97f7da",
"sha256:d5ba529d9ce668be9380563279f3ffe988f27bc5b299c5a28453df2e0b0fbaf2",
"sha256:e2b1a7d093f2e76dc694c17c0c285e846d0b0deb0e8b21dc852ba1a3a4e2f1d6"
],
"index": "pypi",
"version": "==0.21.0"
},
"mutagen": {
"hashes": [
"sha256:6397602efb3c2d7baebd2166ed85731ae1c1d475abca22090b7141ff5034b3e1",
"sha256:9c9f243fcec7f410f138cb12c21c84c64fde4195481a30c9bfb05b5f003adfed"
],
"markers": "python_version < '4' and python_full_version >= '3.5.0'",
"version": "==1.45.1"
},
"mysqlclient": {
"hashes": [
"sha256:0d1cd3a5a4d28c222fa199002810e8146cffd821410b67851af4cc80aeccd97c",
"sha256:828757e419fb11dd6c5ed2576ec92c3efaa93a0f7c39e263586d1ee779c3d782",
"sha256:996924f3483fd36a34a5812210c69e71dea5a3d5978d01199b78b7f6d485c855",
"sha256:b355c8b5a7d58f2e909acdbb050858390ee1b0e13672ae759e5e784110022994",
"sha256:c1ed71bd6244993b526113cca3df66428609f90e4652f37eb51c33496d478b37",
"sha256:c812b67e90082a840efb82a8978369e6e69fc62ce1bda4ca8f3084a9d862308b",
"sha256:dea88c8d3f5a5d9293dfe7f087c16dd350ceb175f2f6631c9cf4caf3e19b7a96"
],
"index": "pypi",
"version": "==2.1.1"
},
"packaging": {
"hashes": [
"sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb",
"sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"
],
"markers": "python_version >= '3.6'",
"version": "==21.3"
},
"pillow": {
"hashes": [
"sha256:088df396b047477dd1bbc7de6e22f58400dae2f21310d9e2ec2933b2ef7dfa4f",
"sha256:09e67ef6e430f90caa093528bd758b0616f8165e57ed8d8ce014ae32df6a831d",
"sha256:0b4d5ad2cd3a1f0d1df882d926b37dbb2ab6c823ae21d041b46910c8f8cd844b",
"sha256:0b525a356680022b0af53385944026d3486fc8c013638cf9900eb87c866afb4c",
"sha256:1d4331aeb12f6b3791911a6da82de72257a99ad99726ed6b63f481c0184b6fb9",
"sha256:20d514c989fa28e73a5adbddd7a171afa5824710d0ab06d4e1234195d2a2e546",
"sha256:2b291cab8a888658d72b575a03e340509b6b050b62db1f5539dd5cd18fd50578",
"sha256:3f6c1716c473ebd1649663bf3b42702d0d53e27af8b64642be0dd3598c761fb1",
"sha256:42dfefbef90eb67c10c45a73a9bc1599d4dac920f7dfcbf4ec6b80cb620757fe",
"sha256:488f3383cf5159907d48d32957ac6f9ea85ccdcc296c14eca1a4e396ecc32098",
"sha256:4d45dbe4b21a9679c3e8b3f7f4f42a45a7d3ddff8a4a16109dff0e1da30a35b2",
"sha256:53c27bd452e0f1bc4bfed07ceb235663a1df7c74df08e37fd6b03eb89454946a",
"sha256:55e74faf8359ddda43fee01bffbc5bd99d96ea508d8a08c527099e84eb708f45",
"sha256:59789a7d06c742e9d13b883d5e3569188c16acb02eeed2510fd3bfdbc1bd1530",
"sha256:5b650dbbc0969a4e226d98a0b440c2f07a850896aed9266b6fedc0f7e7834108",
"sha256:66daa16952d5bf0c9d5389c5e9df562922a59bd16d77e2a276e575d32e38afd1",
"sha256:6e760cf01259a1c0a50f3c845f9cad1af30577fd8b670339b1659c6d0e7a41dd",
"sha256:7502539939b53d7565f3d11d87c78e7ec900d3c72945d4ee0e2f250d598309a0",
"sha256:769a7f131a2f43752455cc72f9f7a093c3ff3856bf976c5fb53a59d0ccc704f6",
"sha256:7c150dbbb4a94ea4825d1e5f2c5501af7141ea95825fadd7829f9b11c97aaf6c",
"sha256:8844217cdf66eabe39567118f229e275f0727e9195635a15e0e4b9227458daaf",
"sha256:8a66fe50386162df2da701b3722781cbe90ce043e7d53c1fd6bd801bca6b48d4",
"sha256:9370d6744d379f2de5d7fa95cdbd3a4d92f0b0ef29609b4b1687f16bc197063d",
"sha256:937a54e5694684f74dcbf6e24cc453bfc5b33940216ddd8f4cd8f0f79167f765",
"sha256:9c857532c719fb30fafabd2371ce9b7031812ff3889d75273827633bca0c4602",
"sha256:a4165205a13b16a29e1ac57efeee6be2dfd5b5408122d59ef2145bc3239fa340",
"sha256:b3fe2ff1e1715d4475d7e2c3e8dabd7c025f4410f79513b4ff2de3d51ce0fa9c",
"sha256:b6617221ff08fbd3b7a811950b5c3f9367f6e941b86259843eab77c8e3d2b56b",
"sha256:b761727ed7d593e49671d1827044b942dd2f4caae6e51bab144d4accf8244a84",
"sha256:baf3be0b9446a4083cc0c5bb9f9c964034be5374b5bc09757be89f5d2fa247b8",
"sha256:c17770a62a71718a74b7548098a74cd6880be16bcfff5f937f900ead90ca8e92",
"sha256:c67db410508b9de9c4694c57ed754b65a460e4812126e87f5052ecf23a011a54",
"sha256:d78ca526a559fb84faaaf84da2dd4addef5edb109db8b81677c0bb1aad342601",
"sha256:e9ed59d1b6ee837f4515b9584f3d26cf0388b742a11ecdae0d9237a94505d03a",
"sha256:f054b020c4d7e9786ae0404278ea318768eb123403b18453e28e47cdb7a0a4bf",
"sha256:f372d0f08eff1475ef426344efe42493f71f377ec52237bf153c5713de987251",
"sha256:f3f6a6034140e9e17e9abc175fc7a266a6e63652028e157750bd98e804a8ed9a",
"sha256:ffde4c6fabb52891d81606411cbfaf77756e3b561b566efd270b3ed3791fde4e"
],
"index": "pypi",
"version": "==9.1.1"
},
"psycopg2-binary": {
"hashes": [
"sha256:01310cf4cf26db9aea5158c217caa92d291f0500051a6469ac52166e1a16f5b7",
"sha256:083a55275f09a62b8ca4902dd11f4b33075b743cf0d360419e2051a8a5d5ff76",
"sha256:090f3348c0ab2cceb6dfbe6bf721ef61262ddf518cd6cc6ecc7d334996d64efa",
"sha256:0a29729145aaaf1ad8bafe663131890e2111f13416b60e460dae0a96af5905c9",
"sha256:0c9d5450c566c80c396b7402895c4369a410cab5a82707b11aee1e624da7d004",
"sha256:10bb90fb4d523a2aa67773d4ff2b833ec00857f5912bafcfd5f5414e45280fb1",
"sha256:12b11322ea00ad8db8c46f18b7dfc47ae215e4df55b46c67a94b4effbaec7094",
"sha256:152f09f57417b831418304c7f30d727dc83a12761627bb826951692cc6491e57",
"sha256:15803fa813ea05bef089fa78835118b5434204f3a17cb9f1e5dbfd0b9deea5af",
"sha256:15c4e4cfa45f5a60599d9cec5f46cd7b1b29d86a6390ec23e8eebaae84e64554",
"sha256:183a517a3a63503f70f808b58bfbf962f23d73b6dccddae5aa56152ef2bcb232",
"sha256:1f14c8b0942714eb3c74e1e71700cbbcb415acbc311c730370e70c578a44a25c",
"sha256:1f6b813106a3abdf7b03640d36e24669234120c72e91d5cbaeb87c5f7c36c65b",
"sha256:280b0bb5cbfe8039205c7981cceb006156a675362a00fe29b16fbc264e242834",
"sha256:2d872e3c9d5d075a2e104540965a1cf898b52274a5923936e5bfddb58c59c7c2",
"sha256:2f9ffd643bc7349eeb664eba8864d9e01f057880f510e4681ba40a6532f93c71",
"sha256:3303f8807f342641851578ee7ed1f3efc9802d00a6f83c101d21c608cb864460",
"sha256:35168209c9d51b145e459e05c31a9eaeffa9a6b0fd61689b48e07464ffd1a83e",
"sha256:3a79d622f5206d695d7824cbf609a4f5b88ea6d6dab5f7c147fc6d333a8787e4",
"sha256:404224e5fef3b193f892abdbf8961ce20e0b6642886cfe1fe1923f41aaa75c9d",
"sha256:46f0e0a6b5fa5851bbd9ab1bc805eef362d3a230fbdfbc209f4a236d0a7a990d",
"sha256:47133f3f872faf28c1e87d4357220e809dfd3fa7c64295a4a148bcd1e6e34ec9",
"sha256:526ea0378246d9b080148f2d6681229f4b5964543c170dd10bf4faaab6e0d27f",
"sha256:53293533fcbb94c202b7c800a12c873cfe24599656b341f56e71dd2b557be063",
"sha256:539b28661b71da7c0e428692438efbcd048ca21ea81af618d845e06ebfd29478",
"sha256:57804fc02ca3ce0dbfbef35c4b3a4a774da66d66ea20f4bda601294ad2ea6092",
"sha256:63638d875be8c2784cfc952c9ac34e2b50e43f9f0a0660b65e2a87d656b3116c",
"sha256:6472a178e291b59e7f16ab49ec8b4f3bdada0a879c68d3817ff0963e722a82ce",
"sha256:68641a34023d306be959101b345732360fc2ea4938982309b786f7be1b43a4a1",
"sha256:6e82d38390a03da28c7985b394ec3f56873174e2c88130e6966cb1c946508e65",
"sha256:761df5313dc15da1502b21453642d7599d26be88bff659382f8f9747c7ebea4e",
"sha256:7af0dd86ddb2f8af5da57a976d27cd2cd15510518d582b478fbb2292428710b4",
"sha256:7b1e9b80afca7b7a386ef087db614faebbf8839b7f4db5eb107d0f1a53225029",
"sha256:874a52ecab70af13e899f7847b3e074eeb16ebac5615665db33bce8a1009cf33",
"sha256:887dd9aac71765ac0d0bac1d0d4b4f2c99d5f5c1382d8b770404f0f3d0ce8a39",
"sha256:8b344adbb9a862de0c635f4f0425b7958bf5a4b927c8594e6e8d261775796d53",
"sha256:8fc53f9af09426a61db9ba357865c77f26076d48669f2e1bb24d85a22fb52307",
"sha256:91920527dea30175cc02a1099f331aa8c1ba39bf8b7762b7b56cbf54bc5cce42",
"sha256:93cd1967a18aa0edd4b95b1dfd554cf15af657cb606280996d393dadc88c3c35",
"sha256:99485cab9ba0fa9b84f1f9e1fef106f44a46ef6afdeec8885e0b88d0772b49e8",
"sha256:9d29409b625a143649d03d0fd7b57e4b92e0ecad9726ba682244b73be91d2fdb",
"sha256:a29b3ca4ec9defec6d42bf5feb36bb5817ba3c0230dd83b4edf4bf02684cd0ae",
"sha256:a9e1f75f96ea388fbcef36c70640c4efbe4650658f3d6a2967b4cc70e907352e",
"sha256:accfe7e982411da3178ec690baaceaad3c278652998b2c45828aaac66cd8285f",
"sha256:adf20d9a67e0b6393eac162eb81fb10bc9130a80540f4df7e7355c2dd4af9fba",
"sha256:af9813db73395fb1fc211bac696faea4ca9ef53f32dc0cfa27e4e7cf766dcf24",
"sha256:b1c8068513f5b158cf7e29c43a77eb34b407db29aca749d3eb9293ee0d3103ca",
"sha256:bda845b664bb6c91446ca9609fc69f7db6c334ec5e4adc87571c34e4f47b7ddb",
"sha256:c381bda330ddf2fccbafab789d83ebc6c53db126e4383e73794c74eedce855ef",
"sha256:c3ae8e75eb7160851e59adc77b3a19a976e50622e44fd4fd47b8b18208189d42",
"sha256:d1c1b569ecafe3a69380a94e6ae09a4789bbb23666f3d3a08d06bbd2451f5ef1",
"sha256:def68d7c21984b0f8218e8a15d514f714d96904265164f75f8d3a70f9c295667",
"sha256:dffc08ca91c9ac09008870c9eb77b00a46b3378719584059c034b8945e26b272",
"sha256:e3699852e22aa68c10de06524a3721ade969abf382da95884e6a10ff798f9281",
"sha256:e847774f8ffd5b398a75bc1c18fbb56564cda3d629fe68fd81971fece2d3c67e",
"sha256:ffb7a888a047696e7f8240d649b43fb3644f14f0ee229077e7f6b9f9081635bd"
],
"index": "pypi",
"version": "==2.9.3"
},
"pycryptodomex": {
"hashes": [
"sha256:04cc393045a8f19dd110c975e30f38ed7ab3faf21ede415ea67afebd95a22380",
"sha256:0776bfaf2c48154ab54ea45392847c1283d2fcf64e232e85565f858baedfc1fa",
"sha256:0fadb9f7fa3150577800eef35f62a8a24b9ddf1563ff060d9bd3af22d3952c8c",
"sha256:18e2ab4813883ae63396c0ffe50b13554b32bb69ec56f0afaf052e7a7ae0d55b",
"sha256:191e73bc84a8064ad1874dba0ebadedd7cce4dedee998549518f2c74a003b2e1",
"sha256:35a8f7afe1867118330e2e0e0bf759c409e28557fb1fc2fbb1c6c937297dbe9a",
"sha256:3709f13ca3852b0b07fc04a2c03b379189232b24007c466be0f605dd4723e9d4",
"sha256:4540904c09704b6f831059c0dfb38584acb82cb97b0125cd52688c1f1e3fffa6",
"sha256:463119d7d22d0fc04a0f9122e9d3e6121c6648bcb12a052b51bd1eed1b996aa2",
"sha256:46b3f05f2f7ac7841053da4e0f69616929ca3c42f238c405f6c3df7759ad2780",
"sha256:48697790203909fab02a33226fda546604f4e2653f9d47bc5d3eb40879fa7c64",
"sha256:5676a132169a1c1a3712edf25250722ebc8c9102aa9abd814df063ca8362454f",
"sha256:65204412d0c6a8e3c41e21e93a5e6054a74fea501afa03046a388cf042e3377a",
"sha256:67e1e6a92151023ccdfcfbc0afb3314ad30080793b4c27956ea06ab1fb9bcd8a",
"sha256:6f5b6ba8aefd624834bc177a2ac292734996bb030f9d1b388e7504103b6fcddf",
"sha256:7341f1bb2dadb0d1a0047f34c3a58208a92423cdbd3244d998e4b28df5eac0ed",
"sha256:78d9621cf0ea35abf2d38fa2ca6d0634eab6c991a78373498ab149953787e5e5",
"sha256:8eecdf9cdc7343001d047f951b9cc805cd68cb6cd77b20ea46af5bffc5bd3dfb",
"sha256:94c7b60e1f52e1a87715571327baea0733708ab4723346598beca4a3b6879794",
"sha256:996e1ba717077ce1e6d4849af7a1426f38b07b3d173b879e27d5e26d2e958beb",
"sha256:a07a64709e366c2041cd5cfbca592b43998bf4df88f7b0ca73dca37071ccf1bd",
"sha256:b6306403228edde6e289f626a3908a2f7f67c344e712cf7c0a508bab3ad9e381",
"sha256:b9279adc16e4b0f590ceff581f53a80179b02cba9056010d733eb4196134a870",
"sha256:c4cb9cb492ea7dcdf222a8d19a1d09002798ea516aeae8877245206d27326d86",
"sha256:dd452a5af7014e866206d41751886c9b4bf379a339fdf2dbfc7dd16c0fb4f8e0",
"sha256:e2b12968522a0358b8917fc7b28865acac002f02f4c4c6020fcb264d76bfd06d",
"sha256:e3164a18348bd53c69b4435ebfb4ac8a4076291ffa2a70b54f0c4b80c7834b1d",
"sha256:e47bf8776a7e15576887f04314f5228c6527b99946e6638cf2f16da56d260cab",
"sha256:f8be976cec59b11f011f790b88aca67b4ea2bd286578d0bd3e31bcd19afcd3e4",
"sha256:fc9bc7a9b79fe5c750fc81a307052f8daabb709bdaabb0fb18fb136b66b653b5"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==3.15.0"
},
"pyparsing": {
"hashes": [
"sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb",
"sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"
],
"markers": "python_full_version >= '3.6.8'",
"version": "==3.0.9"
},
"pytz": {
"hashes": [
"sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7",
"sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"
],
"version": "==2022.1"
},
"rcssmin": {
"hashes": [
"sha256:0a6aae7e119509445bf7aa6da6ca0f285cc198273c20f470ad999ff83bbadcf9",
"sha256:1512223b6a687bb747e4e531187bd49a56ed71287e7ead9529cbaa1ca4718a0a",
"sha256:1d7c2719d014e4e4df4e33b75ae8067c7e246cf470eaec8585e06e2efac7586c",
"sha256:2211a5c91ea14a5937b57904c9121f8bfef20987825e55368143da7d25446e3b",
"sha256:27fc400627fd3d328b7fe95af2a01f5d0af6b5af39731af5d071826a1f08e362",
"sha256:30f5522285065cae0164d20068377d84b5d10b414156115f8729b034d0ea5e8b",
"sha256:32ccaebbbd4d56eab08cf26aed36f5d33389b9d1d3ca1fecf53eb6ab77760ddf",
"sha256:352dd3a78eb914bb1cb269ac2b66b3154f2490a52ab605558c681de3fb5194d2",
"sha256:37f1242e34ca273ed2c26cf778854e18dd11b31c6bfca60e23fce146c84667c1",
"sha256:49807735f26f59404194f1e6f93254b6d5b6f7748c2a954f4470a86a40ff4c13",
"sha256:506e33ab4c47051f7deae35b6d8dbb4a5c025f016e90a830929a1ecc7daa1682",
"sha256:6158d0d86cd611c5304d738dc3d6cfeb23864dd78ad0d83a633f443696ac5d77",
"sha256:7085d1b51dd2556f3aae03947380f6e9e1da29fb1eeadfa6766b7f105c54c9ff",
"sha256:7c44002b79f3656348196005b9522ec5e04f182b466f66d72b16be0bd03c13d8",
"sha256:7da63fee37edf204bbd86785edb4d7491642adbfd1d36fd230b7ccbbd8db1a6f",
"sha256:8b659a88850e772c84cfac4520ec223de6807875e173d8ef3248ab7f90876066",
"sha256:c28b9eb20982b45ebe6adef8bd2547e5ed314dafddfff4eba806b0f8c166cfd1",
"sha256:ddff3a41611664c7f1d9e3d8a9c1669e0e155ac0458e586ffa834dc5953e7d9f",
"sha256:f1a37bbd36b050813673e62ae6464467548628690bf4d48a938170e121e8616e",
"sha256:f31c82d06ba2dbf33c20db9550157e80bb0c4cbd24575c098f0831d1d2e3c5df"
],
"version": "==1.1.0"
},
"redis": {
"hashes": [
"sha256:a52d5694c9eb4292770084fa8c863f79367ca19884b329ab574d5cb2036b3e54",
"sha256:ddf27071df4adf3821c4f2ca59d67525c3a82e5f268bed97b813cb4fabf87880"
],
"index": "pypi",
"version": "==4.3.4"
},
"requests": {
"hashes": [
"sha256:bc7861137fbce630f17b03d3ad02ad0bf978c844f3536d0edda6499dafce2b6f",
"sha256:d568723a7ebd25875d8d1eaf5dfa068cd2fc8194b2e483d7b1f7c81918dbec6b"
],
"index": "pypi",
"version": "==2.28.0"
},
"rjsmin": {
"hashes": [
"sha256:05efa485dfddb6418e3b86d8862463aa15641a61f6ae05e7e6de8f116ee77c69",
"sha256:1622fbb6c6a8daaf77da13cc83356539bfe79c1440f9664b02c7f7b150b9a18e",
"sha256:1c93b29fd725e61718299ffe57de93ff32d71b313eaabbfcc7bd32ddb82831d5",
"sha256:2ed83aca637186bafdc894b4b7fc3657e2d74014ccca7d3d69122c1e82675216",
"sha256:38a4474ed52e1575fb9da983ec8657faecd8ab3738508d36e04f87769411fd3d",
"sha256:3b14f4c2933ec194eb816b71a0854ce461b6419a3d852bf360344731ab28c0a6",
"sha256:40e7211a25d9a11ac9ff50446e41268c978555676828af86fa1866615823bfff",
"sha256:41c7c3910f7b8816e37366b293e576ddecf696c5f2197d53cf2c1526ac336646",
"sha256:4387a00777faddf853eebdece9f2e56ebaf243c3f24676a9de6a20c5d4f3d731",
"sha256:54fc30519365841b27556ccc1cb94c5b4413c384ff6d467442fddba66e2e325a",
"sha256:6c395ffc130332cca744f081ed5efd5699038dcb7a5d30c3ff4bc6adb5b30a62",
"sha256:6c529feb6c400984452494c52dd9fdf59185afeacca2afc5174a28ab37751a1b",
"sha256:86c4da7285ddafe6888cb262da563570f28e4a31146b5164a7a6947b1222196b",
"sha256:8944a8a55ac825b8e5ec29f341ecb7574697691ef416506885898d2f780fb4ca",
"sha256:993935654c1311280e69665367d7e6ff694ac9e1609168cf51cae8c0307df0db",
"sha256:99e5597a812b60058baa1457387dc79cca7d273b2a700dc98bfd20d43d60711d",
"sha256:b6a7c8c8d19e154334f640954e43e57283e87bb4a2f6e23295db14eea8e9fc1d",
"sha256:c81229ffe5b0a0d5b3b5d5e6d0431f182572de9e9a077e85dbae5757db0ab75c",
"sha256:d63e193a2f932a786ae82068aa76d1d126fcdff8582094caff9e5e66c4dcc124",
"sha256:e18fe1a610fb105273bb369f61c2b0bd9e66a3f0792e27e4cac44e42ace1968b"
],
"version": "==1.2.0"
},
"setuptools": {
"hashes": [
"sha256:990a4f7861b31532871ab72331e755b5f14efbe52d336ea7f6118144dd478741",
"sha256:c1848f654aea2e3526d17fc3ce6aeaa5e7e24e66e645b5be2171f3f6b4e5a178"
],
"markers": "python_version >= '3.7'",
"version": "==62.6.0"
},
"six": {
"hashes": [
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.16.0"
},
"sqlparse": {
"hashes": [
"sha256:0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae",
"sha256:48719e356bb8b42991bdbb1e8b83223757b93789c00910a616a071910ca4a64d"
],
"markers": "python_full_version >= '3.5.0'",
"version": "==0.4.2"
},
"urllib3": {
"hashes": [
"sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14",
"sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",
"version": "==1.26.9"
},
"websockets": {
"hashes": [
"sha256:07cdc0a5b2549bcfbadb585ad8471ebdc7bdf91e32e34ae3889001c1c106a6af",
"sha256:210aad7fdd381c52e58777560860c7e6110b6174488ef1d4b681c08b68bf7f8c",
"sha256:28dd20b938a57c3124028680dc1600c197294da5db4292c76a0b48efb3ed7f76",
"sha256:2f94fa3ae454a63ea3a19f73b95deeebc9f02ba2d5617ca16f0bbdae375cda47",
"sha256:31564a67c3e4005f27815634343df688b25705cccb22bc1db621c781ddc64c69",
"sha256:347974105bbd4ea068106ec65e8e8ebd86f28c19e529d115d89bd8cc5cda3079",
"sha256:379e03422178436af4f3abe0aa8f401aa77ae2487843738542a75faf44a31f0c",
"sha256:3eda1cb7e9da1b22588cefff09f0951771d6ee9fa8dbe66f5ae04cc5f26b2b55",
"sha256:51695d3b199cd03098ae5b42833006a0f43dc5418d3102972addc593a783bc02",
"sha256:54c000abeaff6d8771a4e2cef40900919908ea7b6b6a30eae72752607c6db559",
"sha256:5b936bf552e4f6357f5727579072ff1e1324717902127ffe60c92d29b67b7be3",
"sha256:6075fd24df23133c1b078e08a9b04a3bc40b31a8def4ee0b9f2c8865acce913e",
"sha256:661f641b44ed315556a2fa630239adfd77bd1b11cb0b9d96ed8ad90b0b1e4978",
"sha256:6ea6b300a6bdd782e49922d690e11c3669828fe36fc2471408c58b93b5535a98",
"sha256:6ed1d6f791eabfd9808afea1e068f5e59418e55721db8b7f3bfc39dc831c42ae",
"sha256:7934e055fd5cd9dee60f11d16c8d79c4567315824bacb1246d0208a47eca9755",
"sha256:7ab36e17af592eec5747c68ef2722a74c1a4a70f3772bc661079baf4ae30e40d",
"sha256:7f6d96fdb0975044fdd7953b35d003b03f9e2bcf85f2d2cf86285ece53e9f991",
"sha256:83e5ca0d5b743cde3d29fda74ccab37bdd0911f25bd4cdf09ff8b51b7b4f2fa1",
"sha256:85506b3328a9e083cc0a0fb3ba27e33c8db78341b3eb12eb72e8afd166c36680",
"sha256:8af75085b4bc0b5c40c4a3c0e113fa95e84c60f4ed6786cbb675aeb1ee128247",
"sha256:8b1359aba0ff810d5830d5ab8e2c4a02bebf98a60aa0124fb29aa78cfdb8031f",
"sha256:8fbd7d77f8aba46d43245e86dd91a8970eac4fb74c473f8e30e9c07581f852b2",
"sha256:907e8247480f287aa9bbc9391bd6de23c906d48af54c8c421df84655eef66af7",
"sha256:93d5ea0b5da8d66d868b32c614d2b52d14304444e39e13a59566d4acb8d6e2e4",
"sha256:97bc9d41e69a7521a358f9b8e44871f6cdeb42af31815c17aed36372d4eec667",
"sha256:994cdb1942a7a4c2e10098d9162948c9e7b235df755de91ca33f6e0481366fdb",
"sha256:a141de3d5a92188234afa61653ed0bbd2dde46ad47b15c3042ffb89548e77094",
"sha256:a1e15b230c3613e8ea82c9fc6941b2093e8eb939dd794c02754d33980ba81e36",
"sha256:aad5e300ab32036eb3fdc350ad30877210e2f51bceaca83fb7fef4d2b6c72b79",
"sha256:b529fdfa881b69fe563dbd98acce84f3e5a67df13de415e143ef053ff006d500",
"sha256:b9c77f0d1436ea4b4dc089ed8335fa141e6a251a92f75f675056dac4ab47a71e",
"sha256:bb621ec2dbbbe8df78a27dbd9dd7919f9b7d32a73fafcb4d9252fc4637343582",
"sha256:c7250848ce69559756ad0086a37b82c986cd33c2d344ab87fea596c5ac6d9442",
"sha256:c8d1d14aa0f600b5be363077b621b1b4d1eb3fbf90af83f9281cda668e6ff7fd",
"sha256:d1655a6fc7aecd333b079d00fb3c8132d18988e47f19740c69303bf02e9883c6",
"sha256:d6353ba89cfc657a3f5beabb3b69be226adbb5c6c7a66398e17809b0ce3c4731",
"sha256:da4377904a3379f0c1b75a965fff23b28315bcd516d27f99a803720dfebd94d4",
"sha256:e49ea4c1a9543d2bd8a747ff24411509c29e4bdcde05b5b0895e2120cb1a761d",
"sha256:e4e08305bfd76ba8edab08dcc6496f40674f44eb9d5e23153efa0a35750337e8",
"sha256:e6fa05a680e35d0fcc1470cb070b10e6fe247af54768f488ed93542e71339d6f",
"sha256:e7e6f2d6fd48422071cc8a6f8542016f350b79cc782752de531577d35e9bd677",
"sha256:e904c0381c014b914136c492c8fa711ca4cced4e9b3d110e5e7d436d0fc289e8",
"sha256:ec2b0ab7edc8cd4b0eb428b38ed89079bdc20c6bdb5f889d353011038caac2f9",
"sha256:ef5ce841e102278c1c2e98f043db99d6755b1c58bde475516aef3a008ed7f28e",
"sha256:f351c7d7d92f67c0609329ab2735eee0426a03022771b00102816a72715bb00b",
"sha256:fab7c640815812ed5f10fbee7abbf58788d602046b7bb3af9b1ac753a6d5e916",
"sha256:fc06cc8073c8e87072138ba1e431300e2d408f054b27047d047b549455066ff4"
],
"markers": "python_version >= '3.7'",
"version": "==10.3"
},
"whitenoise": {
"hashes": [
"sha256:8e9c600a5c18bd17655ef668ad55b5edf6c24ce9bdca5bf607649ca4b1e8e2c2",
"sha256:8fa943c6d4cd9e27673b70c21a07b0aa120873901e099cd46cab40f7cc96d567"
],
"index": "pypi",
"version": "==6.2.0"
},
"wrapt": {
"hashes": [
"sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3",
"sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b",
"sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4",
"sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2",
"sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656",
"sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3",
"sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff",
"sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310",
"sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a",
"sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57",
"sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069",
"sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383",
"sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe",
"sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87",
"sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d",
"sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b",
"sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907",
"sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f",
"sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0",
"sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28",
"sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1",
"sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853",
"sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc",
"sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3",
"sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3",
"sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164",
"sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1",
"sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c",
"sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1",
"sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7",
"sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1",
"sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320",
"sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed",
"sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1",
"sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248",
"sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c",
"sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456",
"sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77",
"sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef",
"sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1",
"sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7",
"sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86",
"sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4",
"sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d",
"sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d",
"sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8",
"sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5",
"sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471",
"sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00",
"sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68",
"sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3",
"sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d",
"sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735",
"sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d",
"sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569",
"sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7",
"sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59",
"sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5",
"sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb",
"sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b",
"sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f",
"sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462",
"sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015",
"sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==1.14.1"
},
"yt-dlp": {
"hashes": [
"sha256:5fbfac72fd035d11bc2693e5d1cd6933b1bc0712f742f5082a261703810bb5c9",
"sha256:a688f5cbc4a824456983774ccdd4a12befd379f6c92e25074fa85e7b8ce31704"
],
"index": "pypi",
"version": "==2022.6.29"
}
},
"develop": {}
}

View File

@@ -74,7 +74,7 @@ works in a Docker Compose stack. `amd64` (most desktop PCs and servers) and `arm
Example (with Docker on *nix):
First find your the user ID and group ID you want to run TubeSync as, if you're not
First find the user ID and group ID you want to run TubeSync as, if you're not
sure what this is it's probably your current user ID and group ID:
```bash
@@ -117,11 +117,13 @@ $ docker run \
Once running, open `http://localhost:4848` in your browser and you should see the
TubeSync dashboard. If you do, you can proceed to adding some sources (YouTube channels
and playlists). If not, check `docker logs tubesync` to see what errors might be
occuring, typical ones are file permission issues.
occurring, typical ones are file permission issues.
Alternatively, for Docker Compose, you can use something like:
```yaml
```yml
version: '3.7'
services:
tubesync:
image: ghcr.io/meeb/tubesync:latest
container_name: tubesync
@@ -149,7 +151,7 @@ HTTP_USER
HTTP_PASS
```
For example in the `docker run ...` line add in:
For example, in the `docker run ...` line add in:
```bash
...
@@ -239,6 +241,7 @@ and less common features:
* [Reset tasks from the command line](https://github.com/meeb/tubesync/blob/main/docs/reset-tasks.md)
* [Using PostgreSQL, MySQL or MariaDB as database backends](https://github.com/meeb/tubesync/blob/main/docs/other-database-backends.md)
* [Using cookies](https://github.com/meeb/tubesync/blob/main/docs/using-cookies.md)
* [Reset metadata](https://github.com/meeb/tubesync/blob/main/docs/reset-metadata.md)
# Warnings
@@ -292,15 +295,15 @@ your install is doing check the container logs.
### Are there alerts when a download is complete?
No, this feature is best served by existing services such as the execelent
No, this feature is best served by existing services such as the excellent
[Tautulli](https://tautulli.com/) which can monitor your Plex server and send alerts
that way.
### There's errors in my "tasks" tab!
### There are errors in my "tasks" tab!
You only really need to worry about these if there is a permanent failure. Some errors
are temproary and will be retried for you automatically, such as a download got
interrupted and will be tried again later. Sources with permanet errors (such as no
are temporary and will be retried for you automatically, such as a download got
interrupted and will be tried again later. Sources with permanent errors (such as no
media available because you got a channel name wrong) will be shown as errors on the
"sources" tab.
@@ -322,7 +325,7 @@ See the [Pipefile](https://github.com/meeb/tubesync/blob/main/Pipfile) for a ful
### Can I get access to the full Django admin?
Yes, although pretty much all operations are available through the front end interface
Yes, although pretty much all operations are available through the front-end interface
and you can probably break things by playing in the admin. If you still want to access
it you can run:
@@ -348,6 +351,10 @@ etc.). Configuration of this is beyond the scope of this README.
Just `amd64` for the moment. Others may be made available if there is demand.
### The pipenv install fails with "Locking failed"!
Make sure that you have `mysql_config` or `mariadb_config` available, as required by the Python module `mysqlclient`. On Debian-based systems this is usually found in the `libmysqlclient-dev` package.
# Advanced configuration
@@ -355,25 +362,26 @@ There are a number of other environment variables you can set. These are, mostly
**NOT** required to be set in the default container installation, they are really only
useful if you are manually installing TubeSync in some other environment. These are:
| Name | What | Example |
| ------------------------ | ------------------------------------------------------------ | ------------------------------------ |
| DJANGO_SECRET_KEY | Django's SECRET_KEY | YJySXnQLB7UVZw2dXKDWxI5lEZaImK6l |
| DJANGO_FORCE_SCRIPT_NAME | Django's FORCE_SCRIPT_NAME | /somepath |
| TUBESYNC_DEBUG | Enable debugging | True |
| TUBESYNC_WORKERS | Number of background workers, default is 2, max allowed is 8 | 2 |
| TUBESYNC_HOSTS | Django's ALLOWED_HOSTS, defaults to `*` | tubesync.example.com,otherhost.com |
| GUNICORN_WORKERS | Number of gunicorn workers to spawn | 3 |
| LISTEN_HOST | IP address for gunicorn to listen on | 127.0.0.1 |
| LISTEN_PORT | Port number for gunicorn to listen on | 8080 |
| HTTP_USER | Sets the username for HTTP basic authentication | some-username |
| HTTP_PASS | Sets the password for HTTP basic authentication | some-secure-password |
| DATABASE_CONNECTION | Optional external database connection details | mysql://user:pass@host:port/database |
| Name | What | Example |
| --------------------------- | ------------------------------------------------------------ | ------------------------------------ |
| DJANGO_SECRET_KEY | Django's SECRET_KEY | YJySXnQLB7UVZw2dXKDWxI5lEZaImK6l |
| DJANGO_URL_PREFIX | Run TubeSync in a sub-URL on the web server | /somepath/ |
| TUBESYNC_DEBUG | Enable debugging | True |
| TUBESYNC_WORKERS | Number of background workers, default is 2, max allowed is 8 | 2 |
| TUBESYNC_HOSTS | Django's ALLOWED_HOSTS, defaults to `*` | tubesync.example.com,otherhost.com |
| TUBESYNC_RESET_DOWNLOAD_DIR | Toggle resetting `/downloads` permissions, defaults to True | True |
| GUNICORN_WORKERS | Number of gunicorn workers to spawn | 3 |
| LISTEN_HOST | IP address for gunicorn to listen on | 127.0.0.1 |
| LISTEN_PORT | Port number for gunicorn to listen on | 8080 |
| HTTP_USER | Sets the username for HTTP basic authentication | some-username |
| HTTP_PASS | Sets the password for HTTP basic authentication | some-secure-password |
| DATABASE_CONNECTION | Optional external database connection details | mysql://user:pass@host:port/database |
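These variables are only read at startup. Purely as an illustration of how such values typically end up in Django settings, here is a minimal sketch (not TubeSync's actual `settings.py`; the setting names on the left are assumptions made for this example):

```python
import os

# Sketch only: how environment variables like those in the table above are
# commonly mapped onto Django settings. Names are illustrative, not the
# project's real settings module.
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', 'insecure-dev-key')
DEBUG = os.getenv('TUBESYNC_DEBUG', 'false').strip().lower() == 'true'
ALLOWED_HOSTS = os.getenv('TUBESYNC_HOSTS', '*').split(',')
BACKGROUND_WORKERS = min(int(os.getenv('TUBESYNC_WORKERS', '2')), 8)  # max allowed is 8

# Optional sub-path support, e.g. DJANGO_URL_PREFIX=/somepath/
url_prefix = os.getenv('DJANGO_URL_PREFIX', '').rstrip('/')
if url_prefix:
    FORCE_SCRIPT_NAME = url_prefix
    STATIC_URL = f'{url_prefix}/static/'
```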
# Manual, non-containerised, installation
As a relatively normal Django app you can run TubeSync without the container. Beyond
following this rough guide you are on your own and should be knowledgeable about
following this rough guide, you are on your own and should be knowledgeable about
installing and running WSGI-based Python web applications before attempting this.
1. Clone or download this repo
@@ -396,7 +404,7 @@ installing and running WSGI-based Python web applications before attempting this
# Tests
There is a moderately comprehensive test suite focussing on the custom media format
There is a moderately comprehensive test suite focusing on the custom media format
matching logic and that the front-end interface works. You can run it via Django:
```bash

View File

@@ -1,27 +0,0 @@
#!/usr/bin/with-contenv bash
# Change runtime user UID and GID
PUID=${PUID:-911}
PGID=${PGID:-911}
groupmod -o -g "$PGID" app
usermod -o -u "$PUID" app
# Reset permissions
chown -R app:app /run/app && \
chmod -R 0700 /run/app && \
chown -R app:app /config && \
chmod -R 0755 /config && \
chown -R app:app /downloads && \
chmod -R 0755 /downloads && \
chown -R root:app /app && \
chmod -R 0750 /app && \
chown -R app:app /app/common/static && \
chmod -R 0750 /app/common/static && \
chown -R app:app /app/static && \
chmod -R 0750 /app/static && \
find /app -type f ! -iname healthcheck.py -exec chmod 640 {} \; && \
chmod 0755 /app/healthcheck.py
# Run migrations
exec s6-setuidgid app \
/usr/bin/python3 /app/manage.py migrate

View File

@@ -79,6 +79,11 @@ http {
proxy_connect_timeout 10;
}
# File download and streaming
location /media-data/ {
internal;
alias /downloads/;
}
}
}
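The `internal` directive above means nginx refuses direct requests to `/media-data/` and only serves it when the application asks it to. The corresponding Django view is not shown in this excerpt, but the usual pattern for such a location is an `X-Accel-Redirect` response; a minimal sketch, assuming a media object that exposes a relative file name and a content type:

```python
from django.http import HttpResponse

def serve_media_content(request, media):
    # Sketch only: hand the file transfer off to nginx via the internal
    # /media-data/ location declared above. `media.media_file.name` and
    # `media.content_type` are assumed attributes for illustration.
    response = HttpResponse(content_type=media.content_type)
    # nginx resolves this path against `alias /downloads/;`
    response['X-Accel-Redirect'] = f'/media-data/{media.media_file.name}'
    return response
```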

View File

@@ -0,0 +1 @@
gunicorn

View File

@@ -0,0 +1,25 @@
#!/usr/bin/with-contenv bash
UMASK_SET=${UMASK_SET:-022}
umask "$UMASK_SET"
cd /app || exit
PIDFILE=/run/app/celery-beat.pid
SCHEDULE=/tmp/tubesync-celerybeat-schedule
if [ -f "${PIDFILE}" ]
then
PID=$(cat $PIDFILE)
echo "Unexpected PID file exists at ${PIDFILE} with PID: ${PID}"
if kill -0 $PID
then
echo "Killing old celery-beat process with PID: ${PID}"
kill -9 $PID
fi
echo "Removing stale PID file: ${PIDFILE}"
rm ${PIDFILE}
fi
#exec s6-setuidgid app \
# /usr/local/bin/celery --workdir /app -A tubesync beat --pidfile ${PIDFILE} -s ${SCHEDULE}

View File

@@ -0,0 +1 @@
longrun

View File

@@ -0,0 +1 @@
gunicorn

View File

@@ -0,0 +1,24 @@
#!/usr/bin/with-contenv bash
UMASK_SET=${UMASK_SET:-022}
umask "$UMASK_SET"
cd /app || exit
PIDFILE=/run/app/celery-worker.pid
if [ -f "${PIDFILE}" ]
then
PID=$(cat $PIDFILE)
echo "Unexpected PID file exists at ${PIDFILE} with PID: ${PID}"
if kill -0 $PID
then
echo "Killing old celery-worker process with PID: ${PID}"
kill -9 $PID
fi
echo "Removing stale PID file: ${PIDFILE}"
rm ${PIDFILE}
fi
#exec s6-setuidgid app \
# /usr/local/bin/celery --workdir /app -A tubesync worker --pidfile ${PIDFILE} -l INFO

View File

@@ -0,0 +1 @@
longrun

View File

@@ -0,0 +1 @@
tubesync-init

View File

@@ -1,4 +1,4 @@
#!/usr/bin/with-contenv bash
#!/command/with-contenv bash
UMASK_SET=${UMASK_SET:-022}
umask "$UMASK_SET"

View File

@@ -0,0 +1 @@
longrun

View File

@@ -0,0 +1 @@
gunicorn

View File

@@ -0,0 +1,5 @@
#!/command/with-contenv bash
cd /
/usr/sbin/nginx

View File

@@ -0,0 +1 @@
longrun

View File

@@ -1,4 +1,4 @@
#!/usr/bin/with-contenv bash
#!/command/with-contenv bash
exec s6-setuidgid redis \
/usr/bin/redis-server /etc/redis/redis.conf

View File

@@ -0,0 +1 @@
longrun

View File

@@ -0,0 +1,34 @@
#!/command/with-contenv bash
# Change runtime user UID and GID
PUID="${PUID:-911}"
PGID="${PGID:-911}"
groupmod -o -g "$PGID" app
usermod -o -u "$PUID" app
# Reset permissions
chown -R app:app /run/app
chmod -R 0700 /run/app
chown -R app:app /config
chmod -R 0755 /config
chown -R root:app /app
chmod -R 0750 /app
chown -R app:app /app/common/static
chmod -R 0750 /app/common/static
chown -R app:app /app/static
chmod -R 0750 /app/static
find /app -type f ! -iname healthcheck.py -exec chmod 640 {} \;
chmod 0755 /app/healthcheck.py
# Optionally reset the download dir permissions
TUBESYNC_RESET_DOWNLOAD_DIR="${TUBESYNC_RESET_DOWNLOAD_DIR:-True}"
if [ "$TUBESYNC_RESET_DOWNLOAD_DIR" == "True" ]
then
echo "TUBESYNC_RESET_DOWNLOAD_DIR=True, Resetting /downloads directory permissions"
chown -R app:app /downloads
chmod -R 0755 /downloads
fi
# Run migrations
exec s6-setuidgid app \
/usr/bin/python3 /app/manage.py migrate

View File

@@ -0,0 +1 @@
oneshot

View File

@@ -0,0 +1,3 @@
#!/command/execlineb -P
/etc/s6-overlay/s6-rc.d/tubesync-init/run

View File

@@ -0,0 +1 @@
gunicorn

View File

@@ -1,4 +1,4 @@
#!/usr/bin/with-contenv bash
#!/command/with-contenv bash
exec s6-setuidgid app \
/usr/bin/python3 /app/manage.py process_tasks

View File

@@ -0,0 +1 @@
longrun

View File

@@ -1,5 +0,0 @@
#!/usr/bin/with-contenv bash
cd /
/usr/sbin/nginx

View File

@@ -24,7 +24,7 @@ $ docker exec -i tubesync python3 /app/manage.py dumpdata > some-file.json
Then change your database backend over, then use
```bash
$ cat some-file.json | docker exec -i tubesync python3 /app/manage.py loaddata --format=json -
$ cat some-file.json | docker exec -i tubesync python3 /app/manage.py loaddata - --format=json
```
As detailed in the Django documentation:
@@ -78,3 +78,55 @@ entry in the container or stdout logs:
If you see a line similar to the above and the web interface loads, congratulations,
you are now using an external database server for your TubeSync data!
## Database Compression (For MariaDB)
With a lot of media files, the `sync_media` table grows in size quickly.
You can save space with MariaDB's column compression using the following steps:
1. Stop TubeSync
2. Execute `ALTER TABLE sync_media MODIFY metadata LONGTEXT COMPRESSED;` on the `tubesync` database (one way to run this is sketched below)
3. Start TubeSync and confirm the connection still works.
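If you cannot easily reach the database with a MariaDB client, one option is to run the statement through Django's database connection from inside the TubeSync container (a sketch, assuming TubeSync is already configured against your MariaDB instance via `DATABASE_CONNECTION`):

```python
# Run with: docker exec -ti tubesync python3 /app/manage.py shell
# then paste the following. Sketch only; running the SQL directly with the
# mariadb/mysql client works just as well.
from django.db import connection

with connection.cursor() as cursor:
    cursor.execute('ALTER TABLE sync_media MODIFY metadata LONGTEXT COMPRESSED;')
```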
## Docker Compose
If you're using Docker Compose and simply want to connect to another container with
the DB for the performance benefits, a configuration like this would be enough:
```
tubesync-db:
image: postgres:15.2
container_name: tubesync-db
restart: unless-stopped
volumes:
- /<path/to>/init.sql:/docker-entrypoint-initdb.d/init.sql
- /<path/to>/tubesync-db:/var/lib/postgresql/data
environment:
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=testpassword
tubesync:
image: ghcr.io/meeb/tubesync:latest
container_name: tubesync
restart: unless-stopped
ports:
- 4848:4848
volumes:
- /<path/to>/tubesync/config:/config
- /<path/to>/YouTube:/downloads
environment:
- DATABASE_CONNECTION=postgresql://postgres:testpassword@tubesync-db:5432/tubesync
depends_on:
- tubesync-db
```
Note that an `init.sql` file is needed to initialize the `tubesync`
database before it can be written to. This file should contain:
```
CREATE DATABASE tubesync;
```
Then it must be mapped to `/docker-entrypoint-initdb.d/init.sql` for it
to be executed on first startup of the container. See the `tubesync-db`
volume mapping above for how to do this.
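For reference, `DATABASE_CONNECTION` values are standard `scheme://user:pass@host:port/name` URLs. TubeSync parses these with its own `parse_database_connection_string` helper (exercised by the tests further down); purely as an illustration of the idea, a minimal sketch of turning such a URL into a Django `DATABASES` entry:

```python
from urllib.parse import urlparse

# Illustrative only: map a DATABASE_CONNECTION-style URL onto Django settings.
ENGINES = {
    'postgresql': 'django.db.backends.postgresql',
    'mysql': 'django.db.backends.mysql',
}

def parse_connection_url(url):
    parts = urlparse(url)
    return {
        'ENGINE': ENGINES[parts.scheme],
        'NAME': parts.path.lstrip('/'),
        'USER': parts.username,
        'PASSWORD': parts.password,
        'HOST': parts.hostname,
        'PORT': parts.port or '',
    }

DATABASES = {
    'default': parse_connection_url(
        'postgresql://postgres:testpassword@tubesync-db:5432/tubesync'
    ),
}
```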

30
docs/reset-metadata.md Normal file
View File

@@ -0,0 +1,30 @@
# TubeSync
## Advanced usage guide - reset media metadata from the command line
This command allows you to reset all media item metadata. You might want to use
this if you have a lot of media items with invalid metadata and you want to
wipe it, which triggers the metadata to be redownloaded.
## Requirements
You have added some sources and media
## Steps
### 1. Run the reset tasks command
Execute the following Django command:
`./manage.py reset-metadata`
When deploying TubeSync inside a container, you can execute this with:
`docker exec -ti tubesync python3 /app/manage.py reset-metadata`
This command will log what it's doing to the terminal when you run it.
When this is run, new tasks will be created immediately, so all your media
items will start downloading updated metadata straight away; any missing information,
such as thumbnails, will be redownloaded as well.

View File

@@ -1,10 +1,14 @@
import logging
from django.conf import settings
logging_level = logging.DEBUG if settings.DEBUG else logging.INFO
log = logging.getLogger('tubesync')
log.setLevel(logging.DEBUG)
log.setLevel(logging_level)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setLevel(logging_level)
formatter = logging.Formatter('%(asctime)s [%(name)s/%(levelname)s] %(message)s')
ch.setFormatter(formatter)
log.addHandler(ch)

View File

@@ -1,20 +1,20 @@
@font-face {
font-family: 'roboto-light';
src: url('/static/fonts/roboto/roboto-light.woff') format('woff');
font-family: 'roboto';
src: url('../fonts/roboto/roboto-light.woff') format('woff');
font-weight: lighter;
font-style: normal;
}
@font-face {
font-family: 'roboto';
src: url('../fonts/roboto/roboto-regular.woff') format('woff');
font-weight: normal;
font-style: normal;
}
@font-face {
font-family: 'roboto-regular';
src: url('/static/fonts/roboto/roboto-regular.woff') format('woff');
font-weight: normal;
font-style: normal;
}
@font-face {
font-family: 'roboto-bold';
src: url('/static/fonts/roboto/roboto-bold.woff') format('woff');
font-family: 'roboto';
src: url('../fonts/roboto/roboto-bold.woff') format('woff');
font-weight: bold;
font-style: normal;
}

View File

@@ -1,2 +1,2 @@
$font-family: 'roboto-regular', Arial, Helvetica, sans-serif;
$font-family: 'roboto', Arial, Helvetica, sans-serif;
$font-size: 1.05rem;

View File

@@ -65,6 +65,7 @@ readers do not read off random characters that represent icons */
.#{$fa-css-prefix}-arrows-alt-h:before { content: fa-content($fa-var-arrows-alt-h); }
.#{$fa-css-prefix}-arrows-alt-v:before { content: fa-content($fa-var-arrows-alt-v); }
.#{$fa-css-prefix}-artstation:before { content: fa-content($fa-var-artstation); }
.#{$fa-css-prefix}-arrow-rotate-right:before { content: fa-content($fa-var-arrow-rotate-right); }
.#{$fa-css-prefix}-assistive-listening-systems:before { content: fa-content($fa-var-assistive-listening-systems); }
.#{$fa-css-prefix}-asterisk:before { content: fa-content($fa-var-asterisk); }
.#{$fa-css-prefix}-asymmetrik:before { content: fa-content($fa-var-asymmetrik); }

View File

@@ -80,6 +80,7 @@ $fa-var-arrow-right: \f061;
$fa-var-arrow-up: \f062;
$fa-var-arrows-alt: \f0b2;
$fa-var-arrows-alt-h: \f337;
$fa-var-arrow-rotate-right: \f01e;
$fa-var-arrows-alt-v: \f338;
$fa-var-artstation: \f77a;
$fa-var-assistive-listening-systems: \f2a2;

View File

@@ -14,7 +14,7 @@
// Text Label Style
+ span:not(.lever) {
position: relative;
padding-left: 35px;
padding-left: 27px;
cursor: pointer;
display: inline-block;
height: 25px;

View File

@@ -17,3 +17,16 @@ html {
visibility: visible;
opacity: 1;
}
.flex-collection-container {
display: flex !important;
align-items: center;
}
.flex-grow {
flex-grow: 1;
}
.help-text > i {
padding-right: 6px;
}

View File

@@ -3,7 +3,7 @@
<div class="col s12">
<div class="pagination">
{% for i in paginator.page_range %}
<a class="pagenum{% if i == page_obj.number %} currentpage{% endif %}" href="?{% if filter %}filter={{ filter }}&{% endif %}page={{ i }}{% if show_skipped %}&show_skipped=yes{% endif %}">{{ i }}</a>
<a class="pagenum{% if i == page_obj.number %} currentpage{% endif %}" href="?{% if filter %}filter={{ filter }}&{% endif %}page={{ i }}{% if show_skipped %}&show_skipped=yes{% endif %}{% if only_skipped %}&only_skipped=yes{% endif %}">{{ i }}</a>
{% endfor %}
</div>
</div>

View File

@@ -2,7 +2,7 @@ import os.path
from django.conf import settings
from django.test import TestCase, Client
from .testutils import prevent_request_warnings
from .utils import parse_database_connection_string
from .utils import parse_database_connection_string, clean_filename
from .errors import DatabaseConnectionError
@@ -65,7 +65,7 @@ class CommonStaticTestCase(TestCase):
self.assertTrue(os.path.exists(favicon_real_path))
class DatabaseConnectionTestCase(TestCase):
class UtilsTestCase(TestCase):
def test_parse_database_connection_string(self):
database_dict = parse_database_connection_string(
@@ -126,3 +126,12 @@ class DatabaseConnectionTestCase(TestCase):
with self.assertRaises(DatabaseConnectionError):
parse_database_connection_string(
'postgresql://tubesync:password@localhost:5432/tubesync/test')
def test_clean_filename(self):
self.assertEqual(clean_filename('a'), 'a')
self.assertEqual(clean_filename('a\t'), 'a')
self.assertEqual(clean_filename('a\n'), 'a')
self.assertEqual(clean_filename('a a'), 'a a')
self.assertEqual(clean_filename('a a'), 'a a')
self.assertEqual(clean_filename('a\t\t\ta'), 'a a')
self.assertEqual(clean_filename('a\t\t\ta\t\t\t'), 'a a')

View File

@@ -1,3 +1,4 @@
import string
from datetime import datetime
from urllib.parse import urlunsplit, urlencode, urlparse
from yt_dlp.utils import LazyList
@@ -113,8 +114,13 @@ def clean_filename(filename):
to_scrub = '<>\/:*?"|%'
for char in to_scrub:
filename = filename.replace(char, '')
filename = ''.join([c for c in filename if ord(c) > 30])
return ' '.join(filename.split())
clean_filename = ''
for c in filename:
if c in string.whitespace:
c = ' '
if ord(c) > 30:
clean_filename += c
return clean_filename.strip()
def json_serial(obj):

109
tubesync/sync/fields.py Normal file
View File

@@ -0,0 +1,109 @@
from django.forms import MultipleChoiceField, CheckboxSelectMultiple, Field, TypedMultipleChoiceField
from django.db import models
from typing import Any, Optional, Dict
from django.utils.translation import gettext_lazy as _
# this is a form field!
class CustomCheckboxSelectMultiple(CheckboxSelectMultiple):
template_name = 'widgets/checkbox_select.html'
option_template_name = 'widgets/checkbox_option.html'
def get_context(self, name: str, value: Any, attrs) -> Dict[str, Any]:
ctx = super().get_context(name, value, attrs)['widget']
ctx["multipleChoiceProperties"] = []
for _group, options, _index in ctx["optgroups"]:
for option in options:
if not isinstance(value,str) and not isinstance(value,list) and ( option["value"] in value.selected_choices or ( value.allow_all and value.all_choice in value.selected_choices ) ):
checked = True
else:
checked = False
ctx["multipleChoiceProperties"].append({
"template_name": option["template_name"],
"type": option["type"],
"value": option["value"],
"label": option["label"],
"name": option["name"],
"checked": checked})
return { 'widget': ctx }
# this is a database field!
class CommaSepChoiceField(models.Field):
"Implements comma-separated storage of lists"
def __init__(self, separator=",", possible_choices=(("","")), all_choice="", all_label="All", allow_all=False, *args, **kwargs):
self.separator = separator
self.possible_choices = possible_choices
self.selected_choices = []
self.allow_all = allow_all
self.all_label = all_label
self.all_choice = all_choice
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.separator != ",":
kwargs['separator'] = self.separator
kwargs['possible_choices'] = self.possible_choices
return name, path, args, kwargs
def db_type(self, connection):
return 'text'
def get_my_choices(self):
choiceArray = []
if self.possible_choices is None:
return choiceArray
if self.allow_all:
choiceArray.append((self.all_choice, _(self.all_label)))
for t in self.possible_choices:
choiceArray.append(t)
return choiceArray
def formfield(self, **kwargs):
# This is a fairly standard way to set up some defaults
# while letting the caller override them.
defaults = {'form_class': MultipleChoiceField,
'choices': self.get_my_choices,
'widget': CustomCheckboxSelectMultiple,
'label': '',
'required': False}
defaults.update(kwargs)
#del defaults.required
return super().formfield(**defaults)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
# Only include kwarg if it's not the default
if self.separator != ",":
kwargs['separator'] = self.separator
return name, path, args, kwargs
def from_db_value(self, value, expr, conn):
if value is None:
self.selected_choices = []
else:
self.selected_choices = value.split(",")
return self
def get_prep_value(self, value):
if value is None:
return ""
if not isinstance(value,list):
return ""
if self.all_choice not in value:
return ",".join(value)
else:
return self.all_choice
def get_text_for_value(self, val):
fval = [i for i in self.possible_choices if i[0] == val]
if len(fval) <= 0:
return []
else:
return fval[0][1]
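A rough usage sketch for the field defined above: the selected choices are stored as a single comma-separated text column, and a selection containing the "all" choice collapses to just that value. This is illustrative only, using made-up choices, and mirrors how `Source.sponsorblock_categories` uses the field later in this changeset:

```python
# Sketch: how CommaSepChoiceField round-trips values (illustrative choices).
from sync.fields import CommaSepChoiceField

CATEGORIES = (('sponsor', 'Sponsor'), ('intro', 'Intro'), ('outro', 'Outro'))

field = CommaSepChoiceField(
    possible_choices=CATEGORIES,
    all_choice='all',
    allow_all=True,
    default='all',
)

# What get_prep_value() writes to the text column:
assert field.get_prep_value(['sponsor', 'intro']) == 'sponsor,intro'
assert field.get_prep_value(['all', 'sponsor']) == 'all'   # the all_choice wins
assert field.get_prep_value(None) == ''

# from_db_value() repopulates selected_choices when reading back:
field.from_db_value('sponsor,intro', None, None)
assert field.selected_choices == ['sponsor', 'intro']
```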

View File

@@ -0,0 +1,19 @@
from django.core.management.base import BaseCommand
from sync.models import Media
from common.logger import log
class Command(BaseCommand):
help = 'Resets all media item metadata'
def handle(self, *args, **options):
log.info('Resetting all media metadata...')
# Delete all metadata
Media.objects.update(metadata=None)
# Trigger the save signal on each media item
for item in Media.objects.all():
item.save()
log.info('Done')

View File

@@ -53,6 +53,8 @@ def get_best_audio_format(media):
# If the format has a video stream, skip it
if fmt['vcodec'] is not None:
continue
if not fmt['acodec']:
continue
audio_formats.append(fmt)
audio_formats = list(reversed(sorted(audio_formats, key=lambda k: k['abr'])))
if not audio_formats:
@@ -88,6 +90,8 @@ def get_best_video_format(media):
# If the format has an audio stream, skip it
if fmt['acodec'] is not None:
continue
if not fmt['vcodec']:
continue
if media.source.source_resolution.strip().upper() == fmt['format']:
video_formats.append(fmt)
# Check we matched some streams

View File

@@ -44,7 +44,9 @@ class PlexMediaServer(MediaServer):
'<p>The <strong>libraries</strong> is a comma-separated list of Plex '
'library or section IDs, you can find out how to get your library or '
'section IDs <a href="https://support.plex.tv/articles/201242707-plex-'
'media-scanner-via-command-line/#toc-1" target="_blank">here</a>.</p>')
'media-scanner-via-command-line/#toc-1" target="_blank">here</a> or '
'<a href="https://www.plexopedia.com/plex-media-server/api/server/libraries/" '
'target="_blank">here</a>.</p>')
def make_request(self, uri='/', params={}):
headers = {'User-Agent': 'TubeSync'}

View File

@@ -0,0 +1,25 @@
# Generated by Django 3.2.12 on 2022-04-06 06:19
from django.conf import settings
from django.db import migrations, models
def fix_media_file(apps, schema_editor):
Media = apps.get_model('sync', 'Media')
for media in Media.objects.filter(downloaded=True):
download_dir = str(settings.DOWNLOAD_ROOT)
if media.media_file.name.startswith(download_dir):
media.media_file.name = media.media_file.name[len(download_dir) + 1:]
media.save()
class Migration(migrations.Migration):
dependencies = [
('sync', '0012_alter_media_downloaded_format'),
]
operations = [
migrations.RunPython(fix_media_file)
]

View File

@@ -0,0 +1,21 @@
# Generated by Django 3.2.15 on 2022-12-28 20:33
import django.core.files.storage
from django.conf import settings
from django.db import migrations, models
import sync.models
class Migration(migrations.Migration):
dependencies = [
('sync', '0013_fix_elative_media_file'),
]
operations = [
migrations.AlterField(
model_name='media',
name='media_file',
field=models.FileField(blank=True, help_text='Media file', max_length=255, null=True, storage=django.core.files.storage.FileSystemStorage(base_url='/media-data/', location=str(settings.DOWNLOAD_ROOT)), upload_to=sync.models.get_media_file_path, verbose_name='media file'),
),
]

View File

@@ -0,0 +1,23 @@
# Generated by Django 3.2.17 on 2023-02-13 06:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sync', '0014_alter_media_media_file'),
]
operations = [
migrations.AddField(
model_name='media',
name='manual_skip',
field=models.BooleanField(db_index=True, default=False, help_text='Media marked as "skipped", won\'t be downloaded', verbose_name='manual_skip'),
),
migrations.AlterField(
model_name='media',
name='skip',
field=models.BooleanField(db_index=True, default=False, help_text='INTERNAL FLAG - Media will be skipped and not downloaded', verbose_name='skip'),
),
]

View File

@@ -0,0 +1,34 @@
# Generated by Django 3.2.18 on 2023-02-14 20:52
from django.db import migrations, models
import sync.models
class Migration(migrations.Migration):
dependencies = [
('sync', '0015_auto_20230213_0603'),
]
operations = [
migrations.AddField(
model_name='source',
name='embed_metadata',
field=models.BooleanField(default=False, help_text='Embed metadata from source into file', verbose_name='embed metadata'),
),
migrations.AddField(
model_name='source',
name='embed_thumbnail',
field=models.BooleanField(default=False, help_text='Embed thumbnail into the file', verbose_name='embed thumbnail'),
),
migrations.AddField(
model_name='source',
name='enable_sponsorblock',
field=models.BooleanField(default=True, help_text='Use SponsorBlock?', verbose_name='enable sponsorblock'),
),
migrations.AddField(
model_name='source',
name='sponsorblock_categories',
field=sync.models.CommaSepChoiceField(default='all', possible_choices=(('all', 'All'), ('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section'))),
),
]

View File

@@ -0,0 +1,19 @@
# Generated by Django 3.2.18 on 2023-02-20 02:23
from django.db import migrations
import sync.fields
class Migration(migrations.Migration):
dependencies = [
('sync', '0016_auto_20230214_2052'),
]
operations = [
migrations.AlterField(
model_name='source',
name='sponsorblock_categories',
field=sync.fields.CommaSepChoiceField(default='all', help_text='Select the sponsorblocks you want to enforce', separator=''),
),
]

View File

@@ -0,0 +1,27 @@
# Generated by pac
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sync', '0017_alter_source_sponsorblock_categories'),
]
operations = [
migrations.AddField(
model_name='source',
name='write_subtitles',
field=models.BooleanField(default=False, help_text='Download video subtitles', verbose_name='write subtitles'),
),
migrations.AddField(
model_name='source',
name='auto_subtitles',
field=models.BooleanField(default=False, help_text='Accept auto-generated subtitles', verbose_name='accept auto subtitles'),
),
migrations.AddField(
model_name='source',
name='sub_langs',
field=models.CharField(default='en', help_text='List of subtitles langs to download comma-separated. Example: en,fr',max_length=30),
),
]

View File

@@ -0,0 +1,17 @@
# Generated by pac
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sync', '0018_source_subtitles'),
]
operations = [
migrations.AddField(
model_name='source',
name='delete_removed_media',
field=models.BooleanField(default=False, help_text='Delete media that is no longer on this playlist', verbose_name='delete removed media'),
),
]

View File

@@ -0,0 +1,29 @@
# Generated by Django 3.2.22 on 2023-10-24 17:25
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sync', '0019_add_delete_removed_media'),
]
operations = [
migrations.AddField(
model_name='source',
name='filter_text',
field=models.CharField(blank=True, default='', help_text='Regex compatible filter string for video titles', max_length=100, verbose_name='filter string'),
),
migrations.AlterField(
model_name='source',
name='auto_subtitles',
field=models.BooleanField(default=False, help_text='Accept auto-generated subtitles', verbose_name='accept auto-generated subs'),
),
migrations.AlterField(
model_name='source',
name='sub_langs',
field=models.CharField(default='en', help_text='List of subtitles langs to download, comma-separated. Example: en,fr or all,-fr,-live_chat', max_length=30, validators=[django.core.validators.RegexValidator(message='Subtitle langs must be a comma-separated list of langs. example: en,fr or all,-fr,-live_chat', regex='^(\\-?[\\_\\.a-zA-Z]+,)*(\\-?[\\_\\.a-zA-Z]+){1}$')], verbose_name='subs langs'),
),
]
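The `sub_langs` validator added above accepts a comma-separated list of language codes, each optionally prefixed with `-` to exclude it; spaces, empty entries and trailing commas are rejected. A quick standalone check of the pattern, copied verbatim from the migration:

```python
import re

# The validator regex from the migration above.
SUB_LANGS_RE = re.compile(r'^(\-?[\_\.a-zA-Z]+,)*(\-?[\_\.a-zA-Z]+){1}$')

for value in ('en', 'en,fr', 'all,-fr,-live_chat'):
    assert SUB_LANGS_RE.match(value)          # valid lists pass

for value in ('', 'en,', 'en, fr', 'en,,fr'):
    assert SUB_LANGS_RE.match(value) is None  # spaces, empties and trailing commas fail
```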

View File

@@ -0,0 +1,17 @@
# Generated by pac
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sync', '0020_auto_20231024_1825'),
]
operations = [
migrations.AddField(
model_name='source',
name='delete_files_on_disk',
field=models.BooleanField(default=False, help_text='Delete files on disk when they are removed from TubeSync', verbose_name='delete files on disk'),
),
]

View File

@@ -1,6 +1,7 @@
import os
import uuid
import json
import re
from xml.etree import ElementTree
from collections import OrderedDict
from datetime import datetime, timedelta
@@ -8,6 +9,7 @@ from pathlib import Path
from django.conf import settings
from django.db import models
from django.core.files.storage import FileSystemStorage
from django.core.validators import RegexValidator
from django.utils.text import slugify
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
@@ -16,13 +18,12 @@ from common.utils import clean_filename
from .youtube import (get_media_info as get_youtube_media_info,
download_media as download_youtube_media)
from .utils import seconds_to_timestr, parse_media_format
from .matching import (get_best_combined_format, get_best_audio_format,
from .matching import (get_best_combined_format, get_best_audio_format,
get_best_video_format)
from .mediaservers import PlexMediaServer
from .fields import CommaSepChoiceField
media_file_storage = FileSystemStorage(location=str(settings.DOWNLOAD_ROOT))
media_file_storage = FileSystemStorage(location=str(settings.DOWNLOAD_ROOT), base_url='/media-data/')
class Source(models.Model):
'''
@@ -106,6 +107,43 @@ class Source(models.Model):
EXTENSION_MKV = 'mkv'
EXTENSIONS = (EXTENSION_M4A, EXTENSION_OGG, EXTENSION_MKV)
# as stolen from: https://wiki.sponsor.ajay.app/w/Types / https://github.com/yt-dlp/yt-dlp/blob/master/yt_dlp/postprocessor/sponsorblock.py
SPONSORBLOCK_CATEGORIES_CHOICES = (
('sponsor', 'Sponsor'),
('intro', 'Intermission/Intro Animation'),
('outro', 'Endcards/Credits'),
('selfpromo', 'Unpaid/Self Promotion'),
('preview', 'Preview/Recap'),
('filler', 'Filler Tangent'),
('interaction', 'Interaction Reminder'),
('music_offtopic', 'Non-Music Section'),
)
sponsorblock_categories = CommaSepChoiceField(
_(''),
possible_choices=SPONSORBLOCK_CATEGORIES_CHOICES,
all_choice='all',
allow_all=True,
all_label='(all options)',
default='all',
help_text=_('Select the sponsorblocks you want to enforce')
)
embed_metadata = models.BooleanField(
_('embed metadata'),
default=False,
help_text=_('Embed metadata from source into file')
)
embed_thumbnail = models.BooleanField(
_('embed thumbnail'),
default=False,
help_text=_('Embed thumbnail into the file')
)
enable_sponsorblock = models.BooleanField(
_('enable sponsorblock'),
default=True,
help_text=_('Use SponsorBlock?')
)
# Fontawesome icons used for the source on the front end
ICONS = {
SOURCE_TYPE_YOUTUBE_CHANNEL: '<i class="fab fa-youtube"></i>',
@@ -246,6 +284,23 @@ class Source(models.Model):
help_text=_('If "delete old media" is ticked, the number of days after which '
'to automatically delete media')
)
filter_text = models.CharField(
_('filter string'),
max_length=100,
default='',
blank=True,
help_text=_('Regex compatible filter string for video titles')
)
delete_removed_media = models.BooleanField(
_('delete removed media'),
default=False,
help_text=_('Delete media that is no longer on this playlist')
)
delete_files_on_disk = models.BooleanField(
_('delete files on disk'),
default=False,
help_text=_('Delete files on disk when they are removed from TubeSync')
)
source_resolution = models.CharField(
_('source resolution'),
max_length=8,
@@ -309,6 +364,30 @@ class Source(models.Model):
help_text=_('Source has failed to index media')
)
write_subtitles = models.BooleanField(
_('write subtitles'),
default=False,
help_text=_('Download video subtitles')
)
auto_subtitles = models.BooleanField(
_('accept auto-generated subs'),
default=False,
help_text=_('Accept auto-generated subtitles')
)
sub_langs = models.CharField(
_('subs langs'),
max_length=30,
default='en',
help_text=_('List of subtitles langs to download, comma-separated. Example: en,fr or all,-fr,-live_chat'),
validators=[
RegexValidator(
regex=r"^(\-?[\_\.a-zA-Z]+,)*(\-?[\_\.a-zA-Z]+){1}$",
message=_('Subtitle langs must be a comma-separated list of langs. example: en,fr or all,-fr,-live_chat')
)
]
)
def __str__(self):
return self.name
@@ -392,10 +471,14 @@ class Source(models.Model):
@property
def directory_path(self):
download_dir = Path(media_file_storage.location)
return download_dir / self.type_directory_path
@property
def type_directory_path(self):
if self.source_resolution == self.SOURCE_RESOLUTION_AUDIO:
return download_dir / settings.DOWNLOAD_AUDIO_DIR / self.directory
return Path(settings.DOWNLOAD_AUDIO_DIR) / self.directory
else:
return download_dir / settings.DOWNLOAD_VIDEO_DIR / self.directory
return Path(settings.DOWNLOAD_VIDEO_DIR) / self.directory
def make_directory(self):
return os.makedirs(self.directory_path, exist_ok=True)
@@ -442,6 +525,7 @@ class Source(models.Model):
'dd': now.strftime('%d'),
'source': self.slugname,
'source_full': self.name,
'uploader': 'Some Channel Name',
'title': 'some-media-title-name',
'title_full': 'Some Media Title Name',
'key': 'SoMeUnIqUiD',
@@ -463,6 +547,11 @@ class Source(models.Model):
except Exception as e:
return ''
def is_regex_match(self, media_item_title):
if not self.filter_text:
return True
return bool(re.search(self.filter_text, media_item_title))
def index_media(self):
'''
Index the media source returning a list of media metadata as dicts.
@@ -473,7 +562,11 @@ class Source(models.Model):
response = indexer(self.index_url)
if not isinstance(response, dict):
return []
return response.get('entries', [])
entries = response.get('entries', [])
if settings.MAX_ENTRIES_PROCESSING:
entries = entries[:settings.MAX_ENTRIES_PROCESSING]
return entries
def get_media_thumb_path(instance, filename):
@@ -671,7 +764,13 @@ class Media(models.Model):
_('skip'),
db_index=True,
default=False,
help_text=_('Media will be skipped and not downloaded')
help_text=_('INTERNAL FLAG - Media will be skipped and not downloaded')
)
manual_skip = models.BooleanField(
_('manual_skip'),
db_index=True,
default=False,
help_text=_('Media marked as "skipped", won\'t be downloaded')
)
downloaded = models.BooleanField(
_('downloaded'),
@@ -862,7 +961,7 @@ class Media(models.Model):
# Otherwise, calculate from matched format codes
vformat = None
aformat = None
if '+' in format_str:
if format_str and '+' in format_str:
# Separate audio and video streams
vformat_code, aformat_code = format_str.split('+')
vformat = self.get_format_by_code(vformat_code)
@@ -871,7 +970,7 @@ class Media(models.Model):
# Combined stream or audio only
cformat = self.get_format_by_code(format_str)
aformat = cformat
if cformat['vcodec']:
if cformat and cformat['vcodec']:
# Combined
vformat = cformat
if vformat:
@@ -941,6 +1040,7 @@ class Media(models.Model):
'acodec': display_format['acodec'],
'fps': display_format['fps'],
'hdr': display_format['hdr'],
'uploader': self.uploader,
}
@property
@@ -1005,7 +1105,7 @@ class Media(models.Model):
duration = self.loaded_metadata.get(field, 0)
try:
duration = int(duration)
except ValueError:
except (TypeError, ValueError):
duration = 0
return duration
@@ -1128,6 +1228,29 @@ class Media(models.Model):
return False
return os.path.exists(self.media_file.path)
@property
def content_type(self):
if not self.downloaded:
return 'video/mp4'
vcodec = self.downloaded_video_codec
if vcodec is None:
acodec = self.downloaded_audio_codec
if acodec is None:
raise TypeError() # nothing here.
acodec = acodec.lower()
if acodec == "mp4a":
return "audio/mp4"
elif acodec == "opus":
return "audio/opus"
else:
# fall-fall-back.
return 'audio/ogg'
vcodec = vcodec.lower()
if vcodec == 'vp9':
return 'video/webm'
else:
return 'video/mp4'
@property
def nfoxml(self):
'''
@@ -1145,6 +1268,22 @@ class Media(models.Model):
showtitle.text = str(self.source.name).strip()
showtitle.tail = '\n '
nfo.append(showtitle)
# season = upload date year
season = nfo.makeelement('season', {})
if self.source.source_type == Source.SOURCE_TYPE_YOUTUBE_PLAYLIST:
# If it's a playlist, set season to 1
season.text = '1'
else:
# If it's not a playlist, set season to upload date year
season.text = str(self.upload_date.year) if self.upload_date else ''
season.tail = '\n '
nfo.append(season)
# episode = number of video in the year
episode = nfo.makeelement('episode', {})
episode_number = self.calculate_episode_number()
episode.text = str(episode_number) if episode_number else ''
episode.tail = '\n '
nfo.append(episode)
# ratings = media metadata youtube rating
value = nfo.makeelement('value', {})
value.text = str(self.rating)
@@ -1252,7 +1391,10 @@ class Media(models.Model):
f'no valid format available')
# Download the media with youtube-dl
download_youtube_media(self.url, format_str, self.source.extension,
str(self.filepath), self.source.write_json)
str(self.filepath), self.source.write_json,
self.source.sponsorblock_categories.selected_choices, self.source.embed_thumbnail,
self.source.embed_metadata, self.source.enable_sponsorblock,
self.source.write_subtitles, self.source.auto_subtitles,self.source.sub_langs )
# Return the download parameters
return format_str, self.source.extension
@@ -1262,10 +1404,23 @@ class Media(models.Model):
'''
indexer = self.INDEXERS.get(self.source.source_type, None)
if not callable(indexer):
raise Exception(f'Meida with source type f"{self.source.source_type}" '
raise Exception(f'Media with source type f"{self.source.source_type}" '
f'has no indexer')
return indexer(self.url)
def calculate_episode_number(self):
if self.source.source_type == Source.SOURCE_TYPE_YOUTUBE_PLAYLIST:
sorted_media = Media.objects.filter(source=self.source)
else:
self_year = self.upload_date.year if self.upload_date else self.created.year
filtered_media = Media.objects.filter(source=self.source, published__year=self_year)
sorted_media = sorted(filtered_media, key=lambda x: (x.upload_date, x.key))
position_counter = 1
for media in sorted_media:
if media == self:
return position_counter
position_counter += 1
class MediaServer(models.Model):
'''
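The `filter_text` field and `is_regex_match()` method added above let a source accept only media whose title matches a regular expression; an empty filter accepts everything. A quick standalone sketch of the behaviour using plain `re`, mirroring the method body shown above (the example patterns and titles are made up):

```python
import re

def is_regex_match(filter_text, media_item_title):
    # Mirrors Source.is_regex_match() above: an empty filter accepts everything.
    if not filter_text:
        return True
    return bool(re.search(filter_text, media_item_title))

assert is_regex_match('', 'Any title at all')
assert is_regex_match(r'(?i)^review:', 'Review: some gadget')
assert not is_regex_match(r'(?i)^review:', 'Unboxing: some gadget')
```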

View File

@@ -1,4 +1,5 @@
import os
import glob
from django.conf import settings
from django.db.models.signals import pre_save, post_save, pre_delete, post_delete
from django.dispatch import receiver
@@ -74,6 +75,7 @@ def source_pre_delete(sender, instance, **kwargs):
media.delete()
@receiver(post_delete, sender=Source)
def source_post_delete(sender, instance, **kwargs):
# Triggered after a source is deleted
@@ -93,13 +95,17 @@ def task_task_failed(sender, task_id, completed_task, **kwargs):
@receiver(post_save, sender=Media)
def media_post_save(sender, instance, created, **kwargs):
# If the media is skipped manually, bail.
if instance.manual_skip:
return
# Triggered after media is saved
cap_changed = False
can_download_changed = False
# Reset the skip flag if the download cap has changed if the media has not
# already been downloaded
if not instance.downloaded:
if not instance.downloaded and instance.metadata:
max_cap_age = instance.source.download_cap_date
filter_text = instance.source.filter_text.strip()
published = instance.published
if not published:
if not instance.skip:
@@ -113,11 +119,20 @@ def media_post_save(sender, instance, created, **kwargs):
else:
if max_cap_age:
if published > max_cap_age and instance.skip:
# Media was published after the cap date but is set to be skipped
log.info(f'Media: {instance.source} / {instance} has a valid '
f'publishing date, marking to be unskipped')
instance.skip = False
cap_changed = True
if filter_text:
if instance.source.is_regex_match(instance.title):
log.info(f'Media: {instance.source} / {instance} has a valid '
f'publishing date and title filter, marking to be unskipped')
instance.skip = False
cap_changed = True
else:
log.debug(f'Media: {instance.source} / {instance} has a valid publishing date '
f'but failed the title filter match, already marked skipped')
else:
log.info(f'Media: {instance.source} / {instance} has a valid '
f'publishing date, marking to be unskipped')
instance.skip = False
cap_changed = True
elif published <= max_cap_age and not instance.skip:
log.info(f'Media: {instance.source} / {instance} is too old for '
f'the download cap date, marking to be skipped')
@@ -126,10 +141,20 @@ def media_post_save(sender, instance, created, **kwargs):
else:
if instance.skip:
# Media marked to be skipped but source download cap removed
log.info(f'Media: {instance.source} / {instance} has a valid '
f'publishing date, marking to be unskipped')
instance.skip = False
cap_changed = True
if filter_text:
if instance.source.is_regex_match(instance.title):
log.info(f'Media: {instance.source} / {instance} has a valid '
f'publishing date and title filter, marking to be unskipped')
instance.skip = False
cap_changed = True
else:
log.info(f'Media: {instance.source} / {instance} has a valid publishing date '
f'but failed the title filter match, already marked skipped')
else:
log.debug(f'Media: {instance.source} / {instance} has a valid publishing date and '
f'is already marked as not to be skipped')
cap_changed = False
# Recalculate the "can_download" flag, this may
# need to change if the source specifications have been changed
if instance.metadata:
@@ -152,7 +177,7 @@ def media_post_save(sender, instance, created, **kwargs):
verbose_name = _('Downloading metadata for "{}"')
download_media_metadata(
str(instance.pk),
priority=10,
priority=5,
verbose_name=verbose_name.format(instance.pk),
remove_existing_tasks=True
)
@@ -199,6 +224,16 @@ def media_pre_delete(sender, instance, **kwargs):
if thumbnail_url:
delete_task_by_media('sync.tasks.download_media_thumbnail',
(str(instance.pk), thumbnail_url))
if instance.source.delete_files_on_disk and (instance.media_file or instance.thumb):
# Delete all files on disk that share the media item's base filename
filepath = instance.media_file.path if instance.media_file else instance.thumb.path
barefilepath, fileext = os.path.splitext(filepath)
# Get all files that start with the bare file path
all_related_files = glob.glob(f'{barefilepath}.*')
for file in all_related_files:
log.info(f'Deleting file for: {instance} path: {file}')
delete_file(file)
@receiver(post_delete, sender=Media)

View File

@@ -132,16 +132,23 @@ def cleanup_completed_tasks():
def cleanup_old_media():
for media in Media.objects.filter(download_date__isnull=False):
if media.source.delete_old_media and media.source.days_to_keep > 0:
delta = timezone.now() - timedelta(days=media.source.days_to_keep)
if media.downloaded and media.download_date < delta:
# Media was downloaded after the cutoff date, delete it
log.info(f'Deleting expired media: {media.source} / {media} '
f'(now older than {media.source.days_to_keep} days / '
f'download_date before {delta})')
# .delete() also triggers a pre_delete signal that removes the files
media.delete()
for source in Source.objects.filter(delete_old_media=True, days_to_keep__gt=0):
delta = timezone.now() - timedelta(days=source.days_to_keep)
for media in source.media_source.filter(downloaded=True, download_date__lt=delta):
log.info(f'Deleting expired media: {source} / {media} '
f'(now older than {source.days_to_keep} days / '
f'download_date before {delta})')
# .delete() also triggers a pre_delete signal that removes the files
media.delete()
def cleanup_removed_media(source, videos):
media_objects = Media.objects.filter(source=source, downloaded=True)
for item in media_objects:
matching_source_item = [video['id'] for video in videos if video['id'] == item.key]
if not matching_source_item:
log.info(f'{item.title} is no longer in source, removing')
item.delete()
@background(schedule=0)
@@ -153,7 +160,6 @@ def index_source_task(source_id):
source = Source.objects.get(pk=source_id)
except Source.DoesNotExist:
# Task triggered but the Source has been deleted, delete the task
delete_index_source_task(source_id)
return
# Reset any errors
source.has_failed = False
@@ -189,6 +195,9 @@ def index_source_task(source_id):
cleanup_completed_tasks()
# Tack on a cleanup of old media
cleanup_old_media()
if source.delete_removed_media:
log.info(f'Cleaning up media no longer in source {source}')
cleanup_removed_media(source, videos)
@background(schedule=0)
@@ -202,7 +211,6 @@ def check_source_directory_exists(source_id):
source = Source.objects.get(pk=source_id)
except Source.DoesNotExist:
# Task triggered but the Source has been deleted, delete the task
delete_index_source_task(source_id)
return
# Check the source output directory exists
if not source.directory_exists():
@@ -223,6 +231,9 @@ def download_media_metadata(media_id):
log.error(f'Task download_media_metadata(pk={media_id}) called but no '
f'media exists with ID: {media_id}')
return
if media.manual_skip:
log.info(f'Task for ID: {media_id} skipped, due to task being manually skipped.')
return
source = media.source
metadata = media.index_metadata()
media.metadata = json.dumps(metadata, default=json_serial)
@@ -241,6 +252,11 @@ def download_media_metadata(media_id):
log.warn(f'Media: {source} / {media} is older than cap age '
f'{max_cap_age}, skipping')
media.skip = True
# If the source has a search filter, check the video title matches the filter
if source.filter_text and not source.is_regex_match(media.title):
# Title does not match the filter regex (a blank filter skips this check entirely)
log.warn(f'Media: {source} / {media} does not match {source.filter_text}, skipping')
media.skip = True
# If the source has a cut-off check the upload date is within the allowed delta
if source.delete_old_media and source.days_to_keep > 0:
if not isinstance(media.published, datetime):
@@ -341,7 +357,7 @@ def download_media(media_id):
log.info(f'Successfully downloaded media: {media} (UUID: {media.pk}) to: '
f'"{filepath}"')
# Link the media file to the object and update info about the download
media.media_file.name = str(filepath)
media.media_file.name = str(media.source.type_directory_path / media.filename)
media.downloaded = True
media.download_date = timezone.now()
media.downloaded_filesize = os.path.getsize(filepath)
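The media_file.name change above stores a path relative to the download root rather than the absolute download path; a hedged illustration with made-up values:

# Illustrative values; type_directory_path and filename come from the Source and Media models,
# e.g. type_directory_path == Path('video/Some Channel'), filename == 'example.mkv'
media.media_file.name = str(media.source.type_directory_path / media.filename)
# media_file.name is now 'video/Some Channel/example.mkv' rather than the
# absolute '/downloads/video/Some Channel/example.mkv' produced by str(filepath)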

View File

@@ -43,6 +43,11 @@
<td>Full source name</td>
<td>My Source</td>
</tr>
<tr>
<td>{uploader}</td>
<td>Uploader name</td>
<td>Some Channel Name</td>
</tr>
<tr>
<td>{title}</td>
<td>Lower case media title, max 80 chars</td>

View File

@@ -9,10 +9,24 @@
{% if media.title %}<h2 class="truncate"><strong>{{ media.title }}</strong></h2>{% endif %}
<p class="truncate"><strong><a href="{{ media.url }}" target="_blank"><i class="fas fa-link"></i> {{ media.url }}</a></strong></p>
<p class="truncate">Downloading to: <strong>{{ media.source.directory_path }}</strong></p>
{% if download_state == 'downloaded' %}
{% if media.source.is_audio %}
<audio controls src="{% url 'sync:media-content' pk=media.pk %}"></audio>
{% else %}
<video controls style="width: 100%">
<source src="{% url 'sync:media-content' pk=media.pk %}">
</video>
{% endif %}
<p class="truncate"><a href="{% url 'sync:media-content' pk=media.pk %}" download="{{ media.filename }}"><strong><i class="fas fa-download"></i> Download</strong></a></p>
{% endif %}
</div>
</div>
{% if not media.can_download %}{% include 'errorbox.html' with message='Media cannot be downloaded because it has no formats which match the source requirements.' %}{% endif %}
{% if media.skip %}{% include 'errorbox.html' with message='Media is marked to be skipped and will not be downloaded.' %}{% endif %}
{% if media.manual_skip %}{% include 'errorbox.html' with message='Media is marked to be skipped and will not be downloaded.' %}
{% else %}
{% if not media.can_download %}{% include 'errorbox.html' with message='Media cannot be downloaded because it has no formats which match the source requirements.' %}{% endif %}
{% if media.skip %}{% include 'errorbox.html' with message='This media may be skipped due to error(s).' %}{% endif %}
{% endif %}
{% include 'infobox.html' with message=message %}
<div class="row">
<div class="col s12 m7">
@@ -156,10 +170,10 @@
{% else %}
<div class="row">
<div class="col s12">
{% if media.skip %}
<a href="{% url 'sync:enable-media' pk=media.pk %}" class="btn">Enable (unskip) media <i class="fas fa-cloud-download-alt"></i></a>
{% if media.manual_skip %}
<a href="{% url 'sync:enable-media' pk=media.pk %}" class="btn">Unskip media (manually) <i class="fas fa-cloud-download-alt"></i></a>
{% else %}
<a href="{% url 'sync:skip-media' pk=media.pk %}" class="btn delete-button">Skip media <i class="fas fa-times-circle"></i></a>
<a href="{% url 'sync:skip-media' pk=media.pk %}" class="btn delete-button">Manually mark media to be skipped <i class="fas fa-times-circle"></i></a>
{% endif %}
</div>
</div>

View File

@@ -36,8 +36,10 @@
{% if m.downloaded %}
<i class="fas fa-check-circle" title="Downloaded"></i> {{ m.download_date|date:'Y-m-d' }}
{% else %}
{% if m.skip %}
<span class="error-text"><i class="fas fa-times" title="Skipping media"></i> Skipped</span>
{% if m.manual_skip %}
<span class="error-text"><i class="fas fa-times" title="Skipping media"></i> Manually skipped</span>
{% elif m.skip %}
<span class="error-text"><i class="fas fa-times" title="Skipping media"></i> Skipped by system</span>
{% elif not m.source.download_media %}
<span class="error-text"><i class="fas fa-times" title="Not downloading media for this source"></i> Disabled at source</span>
{% elif not m.has_metadata %}
@@ -62,5 +64,5 @@
</div>
{% endfor %}
</div>
{% include 'pagination.html' with pagination=sources.paginator filter=source.pk show_skipped=show_skipped %}
{% include 'pagination.html' with pagination=sources.paginator filter=source.pk show_skipped=show_skipped only_skipped=only_skipped%}
{% endblock %}

View File

@@ -9,8 +9,8 @@
<p>
Are you sure you want to delete this source? Deleting a source is permanent.
By default, deleting a source does not delete any saved media files. You can
tick the &quot;also delete downloaded media&quot; checkbox to also remove saved
media when you delete the source. Deleting a source cannot be undone.
<strong>tick the &quot;also delete downloaded media&quot; checkbox to also remove the directory {{ source.directory_path }}
</strong>when you delete the source. Deleting a source cannot be undone.
</p>
</div>
</div>

View File

@@ -43,6 +43,10 @@
<td class="hide-on-small-only">Directory</td>
<td><span class="hide-on-med-and-up">Directory<br></span><strong>{{ source.directory }}</strong></td>
</tr>
<tr title="Filter text">
<td class="hide-on-small-only">Filter text</td>
<td><span class="hide-on-med-and-up">Filter text<br></span><strong>{{ source.filter_text }}</strong></td>
</tr>
<tr title="Media file name format to use for saving files">
<td class="hide-on-small-only">Media format</td>
<td><span class="hide-on-med-and-up">Media format<br></span><strong>{{ source.media_format }}</strong></td>
@@ -115,6 +119,14 @@
<td class="hide-on-small-only">Write JSON?</td>
<td><span class="hide-on-med-and-up">Write JSON?<br></span><strong>{% if source.write_json %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
</tr>
<tr title="Delete media that is no longer on this playlist?">
<td class="hide-on-small-only">Delete removed media</td>
<td><span class="hide-on-med-and-up">Delete removed media<br></span><strong>{% if source.delete_removed_media %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
</tr>
<tr title="Delete files on disk when they are removed from TubeSync?">
<td class="hide-on-small-only">Delete files on disk</td>
<td><span class="hide-on-med-and-up">Delete files on disk<br></span><strong>{% if source.delete_files_on_disk %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
</tr>
{% if source.delete_old_media and source.days_to_keep > 0 %}
<tr title="Days after which your media from this source will be locally deleted">
<td class="hide-on-small-only">Delete old media</td>
@@ -130,6 +142,55 @@
<td class="hide-on-small-only">UUID</td>
<td><span class="hide-on-med-and-up">UUID<br></span><strong>{{ source.uuid }}</strong></td>
</tr>
<tr title="{{ _('Embedding thumbnail?') }}">
<td class="hide-on-small-only">{{ _("Embed thumbnail?") }}:</td>
<td><span class="hide-on-med-and-up">{{ _("Embed thumbnail?") }}<br></span><strong><i class="fas {% if source.embed_thumbnail %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
</tr>
<tr title="{{ _('Embedding metadata?') }}">
<td class="hide-on-small-only">{{ _("Embed metadata?") }}:</td>
<td><span class="hide-on-med-and-up">{{ _("Embed metadata?") }}<br></span><strong><i class="fas {% if source.embed_metadata %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
</tr>
<tr title="{{ _('Is sponsorblock enabled?') }}">
<td class="hide-on-small-only">{{ _("SponsorBlock?") }}:</td>
<td><span class="hide-on-med-and-up">{{ _("Sponsorblock enabled?") }}<br></span><strong><i class="fas {% if source.enable_sponsorblock %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
</tr>
{% if source.enable_sponsorblock %}
<tr title="{{ _('SponsorBlock: What to block?') }}">
<td class="hide-on-small-only">{{ _("What blocked?") }}:</td>
<td><span class="hide-on-med-and-up">{{ _("What blocked?") }}<br></span><strong>
{% if source.sponsorblock_categories.all_choice in source.sponsorblock_categories.selected_choices %}
{% for k,v in source.sponsorblock_categories.possible_choices %}
{{ v }}: <i class="fas fa-check"></i><BR>
{% endfor %}
{% else %}
{% for c in source.sponsorblock_categories.selected_choices %}
{% for k,v in source.sponsorblock_categories.possible_choices %}
{% if k == c %} {{ v }}: <i class="fas fa-check"></i><BR>{% endif %}
{% endfor %}
{% endfor %}
{% endif %}
</strong></td>
</tr>
{% endif %}
<tr title="{{ _('Are Subtitles downloaded?') }}">
<td class="hide-on-small-only">{{ _("Download subtitles?") }}:</td>
<td><span class="hide-on-med-and-up">{{ _("Download subtitles?") }}:</span><strong><i class="fas {% if source.write_subtitles %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
</tr>
{% if source.write_subtitles %}
<tr title="{{ _('Are auto subs accepted?') }}">
<td class="hide-on-small-only">{{ _("Auto-generated subtitles?") }}:</td>
<td><span class="hide-on-med-and-up">{{ _("Auto-generated subtitles?") }}:</span><strong><i class="fas {% if source.auto_subtitles %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
</tr>
<tr title="{{ _('Subs langs?') }}">
<td class="hide-on-small-only">{{ _("Subs langs?") }}:</td>
<td><span class="hide-on-med-and-up">{{ _("Subs langs?") }}:</span><strong>{{source.sub_langs}}</strong></td>
</tr>
{% endif %}
</table>
</div>
</div>

View File

@@ -24,15 +24,18 @@
<div class="col s12">
<div class="collection">
{% for source in sources %}
<a href="{% url 'sync:source' pk=source.pk %}" class="collection-item">
{{ source.icon|safe }} <strong>{{ source.name }}</strong> ({{ source.get_source_type_display }} &quot;{{ source.key }}&quot;)<br>
{{ source.format_summary }}<br>
{% if source.has_failed %}
<span class="error-text"><i class="fas fa-exclamation-triangle"></i> <strong>Source has permanent failures</strong></span>
{% else %}
<strong>{{ source.media_count }}</strong> media items, <strong>{{ source.downloaded_count }}</strong> downloaded{% if source.delete_old_media and source.days_to_keep > 0 %}, keeping {{ source.days_to_keep }} days of media{% endif %}
{% endif %}
</a>
<span class="collection-item flex-collection-container">
<a href="{% url 'sync:source' pk=source.pk %}" class="flex-grow">
{{ source.icon|safe }} <strong>{{ source.name }}</strong> ({{ source.get_source_type_display }} &quot;{{ source.key }}&quot;)<br>
{{ source.format_summary }}<br>
{% if source.has_failed %}
<span class="error-text"><i class="fas fa-exclamation-triangle"></i> <strong>Source has permanent failures</strong></span>
{% else %}
<strong>{{ source.media_count }}</strong> media items, <strong>{{ source.downloaded_count }}</strong> downloaded{% if source.delete_old_media and source.days_to_keep > 0 %}, keeping {{ source.days_to_keep }} days of media{% endif %}
{% endif %}
</a>
<a href="{% url 'sync:source-sync-now' pk=source.pk %}" class="collection-item"><i class="fas fa-arrow-rotate-right"></i></a>
</span>
{% empty %}
<span class="collection-item no-items"><i class="fas fa-info-circle"></i> You haven't added any sources.</span>
{% endfor %}

View File

@@ -66,7 +66,7 @@
{% for task in scheduled %}
<a href="{% url task.url pk=task.instance.pk %}" class="collection-item">
<i class="fas fa-stopwatch"></i> <strong>{{ task }}</strong><br>
{% if task.instance.index_schedule %}Scheduled to run {{ task.instance.get_index_schedule_display|lower }}.<br>{% endif %}
{% if task.instance.index_schedule and task.repeat > 0 %}Scheduled to run {{ task.instance.get_index_schedule_display|lower }}.<br>{% endif %}
<i class="fas fa-redo"></i> Task will run {% if task.run_now %}<strong>immediately</strong>{% else %}at <strong>{{ task.run_at|date:'Y-m-d H:i:s' }}</strong>{% endif %}
</a>
{% empty %}

View File

@@ -0,0 +1,7 @@
<!--<input type="{{ option.type }}" name="{{ option.name }}" value="{{ option.value }}" id="{{ option.value }}"><BR>
<label for="{{ option.value }}">{{option.label}}</label>-->
<label>
<input type="{{ option.type }}" name="{{ option.name }}" value="{{ option.value }}" id="{{ option.value }}" {% if option.checked %}checked{% endif %}>
<span>{{option.label}}</span>
</label>

View File

@@ -0,0 +1,5 @@
</label>
{% for option in widget.multipleChoiceProperties %}
{% include option.template_name with option=option %}
{% endfor %}
<label>

File diff suppressed because it is too large

View File

@@ -6,7 +6,7 @@
import logging
from datetime import datetime
from datetime import datetime, timedelta
from urllib.parse import urlsplit
from xml.etree import ElementTree
from django.conf import settings
@@ -14,6 +14,7 @@ from django.test import TestCase, Client
from django.utils import timezone
from background_task.models import Task
from .models import Source, Media
from .tasks import cleanup_old_media
class FrontEndTestCase(TestCase):
@@ -36,6 +37,9 @@ class FrontEndTestCase(TestCase):
test_sources = {
'youtube-channel': {
'valid': (
'https://m.youtube.com/testchannel',
'https://m.youtube.com/c/testchannel',
'https://m.youtube.com/c/testchannel/videos',
'https://www.youtube.com/testchannel',
'https://www.youtube.com/c/testchannel',
'https://www.youtube.com/c/testchannel/videos',
@@ -47,6 +51,7 @@ class FrontEndTestCase(TestCase):
'invalid_domain': (
'https://www.test.com/c/testchannel',
'https://www.example.com/c/testchannel',
'https://n.youtube.com/c/testchannel',
),
'invalid_path': (
'https://www.youtube.com/test/invalid',
@@ -62,6 +67,8 @@ class FrontEndTestCase(TestCase):
},
'youtube-channel-id': {
'valid': (
'https://m.youtube.com/channel/channelid',
'https://m.youtube.com/channel/channelid/videos',
'https://www.youtube.com/channel/channelid',
'https://www.youtube.com/channel/channelid/videos',
),
@@ -72,6 +79,7 @@ class FrontEndTestCase(TestCase):
'invalid_domain': (
'https://www.test.com/channel/channelid',
'https://www.example.com/channel/channelid',
'https://n.youtube.com/channel/channelid',
),
'invalid_path': (
'https://www.youtube.com/test/invalid',
@@ -83,6 +91,8 @@ class FrontEndTestCase(TestCase):
},
'youtube-playlist': {
'valid': (
'https://m.youtube.com/playlist?list=testplaylist',
'https://m.youtube.com/watch?v=testvideo&list=testplaylist',
'https://www.youtube.com/playlist?list=testplaylist',
'https://www.youtube.com/watch?v=testvideo&list=testplaylist',
),
@@ -93,6 +103,7 @@ class FrontEndTestCase(TestCase):
'invalid_domain': (
'https://www.test.com/playlist?list=testplaylist',
'https://www.example.com/playlist?list=testplaylist',
'https://n.youtube.com/playlist?list=testplaylist',
),
'invalid_path': (
'https://www.youtube.com/notplaylist?list=testplaylist',
@@ -164,6 +175,7 @@ class FrontEndTestCase(TestCase):
'directory': 'testdirectory',
'media_format': settings.MEDIA_FORMATSTR_DEFAULT,
'download_cap': 0,
'filter_text':'.*',
'index_schedule': 3600,
'delete_old_media': False,
'days_to_keep': 14,
@@ -172,7 +184,8 @@ class FrontEndTestCase(TestCase):
'source_acodec': 'OPUS',
'prefer_60fps': False,
'prefer_hdr': False,
'fallback': 'f'
'fallback': 'f',
'sub_langs': 'en',
}
response = c.post('/source-add', data)
self.assertEqual(response.status_code, 302)
@@ -205,6 +218,7 @@ class FrontEndTestCase(TestCase):
'directory': 'testdirectory',
'media_format': settings.MEDIA_FORMATSTR_DEFAULT,
'download_cap': 0,
'filter_text':'.*',
'index_schedule': Source.IndexSchedule.EVERY_HOUR,
'delete_old_media': False,
'days_to_keep': 14,
@@ -213,7 +227,8 @@ class FrontEndTestCase(TestCase):
'source_acodec': Source.SOURCE_ACODEC_OPUS,
'prefer_60fps': False,
'prefer_hdr': False,
'fallback': Source.FALLBACK_FAIL
'fallback': Source.FALLBACK_FAIL,
'sub_langs': 'en',
}
response = c.post(f'/source-update/{source_uuid}', data)
self.assertEqual(response.status_code, 302)
@@ -234,6 +249,7 @@ class FrontEndTestCase(TestCase):
'directory': 'testdirectory',
'media_format': settings.MEDIA_FORMATSTR_DEFAULT,
'download_cap': 0,
'filter_text':'.*',
'index_schedule': Source.IndexSchedule.EVERY_2_HOURS, # changed
'delete_old_media': False,
'days_to_keep': 14,
@@ -242,7 +258,8 @@ class FrontEndTestCase(TestCase):
'source_acodec': Source.SOURCE_ACODEC_OPUS,
'prefer_60fps': False,
'prefer_hdr': False,
'fallback': Source.FALLBACK_FAIL
'fallback': Source.FALLBACK_FAIL,
'sub_langs': 'en',
}
response = c.post(f'/source-update/{source_uuid}', data)
self.assertEqual(response.status_code, 302)
@@ -454,11 +471,14 @@ metadata_60fps_filepath = settings.BASE_DIR / 'sync' / 'testdata' / 'metadata_60
metadata_60fps = open(metadata_60fps_filepath, 'rt').read()
metadata_60fps_hdr_filepath = settings.BASE_DIR / 'sync' / 'testdata' / 'metadata_60fps_hdr.json'
metadata_60fps_hdr = open(metadata_60fps_hdr_filepath, 'rt').read()
metadata_20230629_filepath = settings.BASE_DIR / 'sync' / 'testdata' / 'metadata_2023-06-29.json'
metadata_20230629 = open(metadata_20230629_filepath, 'rt').read()
all_test_metadata = {
'boring': metadata,
'hdr': metadata_hdr,
'60fps': metadata_60fps,
'60fps+hdr': metadata_60fps_hdr,
'20230629': metadata_20230629,
}
@@ -641,6 +661,8 @@ class MediaTestCase(TestCase):
'<episodedetails>',
' <title>no fancy stuff title</title>',
' <showtitle>testname</showtitle>',
' <season>2017</season>',
' <episode></episode>',
' <ratings>',
' <rating default="True" max="5" name="youtube">',
' <value>1.2345</value>',
@@ -1387,3 +1409,118 @@ class FormatMatchingTestCase(TestCase):
match_type, format_code = self.media.get_best_video_format()
self.assertEqual(format_code, expected_format_code)
self.assertEqual(match_type, expeceted_match_type)
def test_metadata_20230629(self):
self.media.metadata = all_test_metadata['20230629']
expected_matches = {
# (format, vcodec, prefer_60fps, prefer_hdr): (match_type, code),
('360p', 'AVC1', False, True): (False, '134'), # Fallback match, no hdr
('360p', 'AVC1', True, False): (False, '134'), # Fallback match, no 60fps
('360p', 'AVC1', True, True): (False, '332'), # Fallback match, 60fps+hdr, switched to VP9
('360p', 'VP9', False, False): (True, '243'), # Exact match
('360p', 'VP9', False, True): (True, '332'), # Exact match, hdr
('360p', 'VP9', True, False): (False, '332'), # Fallback match, 60fps, extra hdr
('360p', 'VP9', True, True): (True, '332'), # Exact match, 60fps+hdr
('480p', 'AVC1', False, False): (True, '135'), # Exact match
('480p', 'AVC1', False, True): (False, '135'), # Fallback match, no hdr
('480p', 'AVC1', True, False): (False, '135'), # Fallback match, no 60fps
('480p', 'AVC1', True, True): (False, '333'), # Fallback match, 60fps+hdr, switched to VP9
('480p', 'VP9', False, False): (True, '244'), # Exact match
('480p', 'VP9', False, True): (True, '333'), # Exact match, hdr
('480p', 'VP9', True, False): (False, '333'), # Fallback match, 60fps, extra hdr
('480p', 'VP9', True, True): (True, '333'), # Exact match, 60fps+hdr
('720p', 'AVC1', False, False): (True, '136'), # Exact match
('720p', 'AVC1', False, True): (False, '136'), # Fallback match, no hdr
('720p', 'AVC1', True, False): (True, '298'), # Exact match, 60fps
('720p', 'AVC1', True, True): (False, '334'), # Fallback match, 60fps+hdr, switched to VP9
('720p', 'VP9', False, False): (True, '247'), # Exact match
('720p', 'VP9', False, True): (True, '334'), # Exact match, hdr
('720p', 'VP9', True, False): (True, '302'), # Exact match, 60fps
('720p', 'VP9', True, True): (True, '334'), # Exact match, 60fps+hdr
('1440p', 'AVC1', False, False): (False, '308'), # Fallback match, 60fps, switched to VP9 (no 1440p AVC1)
('1440p', 'AVC1', False, True): (False, '336'), # Fallback match, 60fps+hdr, switched to VP9 (no 1440p AVC1)
('1440p', 'AVC1', True, False): (False, '308'), # Fallback match, 60fps, switched to VP9 (no 1440p AVC1)
('1440p', 'AVC1', True, True): (False, '336'), # Fallback match, 60fps+hdr, switched to VP9 (no 1440p AVC1)
('1440p', 'VP9', False, False): (False, '308'), # Fallback, 60fps
('1440p', 'VP9', False, True): (True, '336'), # Exact match, hdr
('1440p', 'VP9', True, False): (True, '308'), # Exact match, 60fps
('1440p', 'VP9', True, True): (True, '336'), # Exact match, 60fps+hdr
('2160p', 'AVC1', False, False): (False, '315'), # Fallback, 60fps, switched to VP9 (no 2160p AVC1)
('2160p', 'AVC1', False, True): (False, '337'), # Fallback match, 60fps+hdr, switched to VP9 (no 2160p AVC1)
('2160p', 'AVC1', True, False): (False, '315'), # Fallback, switched to VP9 (no 2160p AVC1)
('2160p', 'AVC1', True, True): (False, '337'), # Fallback match, 60fps+hdr, switched to VP9 (no 2160p AVC1)
('2160p', 'VP9', False, False): (False, '315'), # Fallback, 60fps
('2160p', 'VP9', False, True): (True, '337'), # Exact match, hdr
('2160p', 'VP9', True, False): (True, '315'), # Exact match, 60fps
('2160p', 'VP9', True, True): (True, '337'), # Exact match, 60fps+hdr
('4320p', 'AVC1', False, False): (False, '272'), # Fallback, 60fps, switched to VP9 (no 4320p AVC1, no other 8k streams)
('4320p', 'AVC1', False, True): (False, '272'), # Fallback, 60fps, switched to VP9 (no 4320p AVC1, no other 8k streams)
('4320p', 'AVC1', True, False): (False, '272'), # Fallback, 60fps, switched to VP9 (no 4320p AVC1, no other 8k streams)
('4320p', 'AVC1', True, True): (False, '272'), # Fallback, 60fps, switched to VP9 (no 4320p AVC1, no other 8k streams)
('4320p', 'VP9', False, False): (False, '272'), # Fallback, 60fps (no other 8k streams)
('4320p', 'VP9', False, True): (False, '272'), # Fallback, 60fps (no other 8k streams)
('4320p', 'VP9', True, False): (True, '272'), # Exact match, 60fps
('4320p', 'VP9', True, True): (False, '272'), # Fallback, 60fps (no other 8k streams)
}
for params, expected in expected_matches.items():
resolution, vcodec, prefer_60fps, prefer_hdr = params
expeceted_match_type, expected_format_code = expected
self.source.source_resolution = resolution
self.source.source_vcodec = vcodec
self.source.prefer_60fps = prefer_60fps
self.source.prefer_hdr = prefer_hdr
# The aim here is to execute the matching code to find error paths, specific testing isn't required
self.media.get_best_video_format()
self.media.get_best_audio_format()
def test_is_regex_match(self):
self.media.metadata = all_test_metadata['boring']
expected_matches = {
('.*'): (True),
('no fancy stuff'): (True),
('No fancy stuff'): (False),
('(?i)No fancy stuff'): (True), #set case insensitive flag
('no'): (True),
('Foo'): (False),
('^(?!.*fancy).*$'): (False),
('^(?!.*funny).*$'): (True),
('(?=.*f.*)(?=.{0,2}|.{4,})'): (True),
('f{4,}'): (False),
('^[^A-Z]*$'): (True),
('^[^a-z]*$'): (False),
('^[^\\s]*$'): (False)
}
for params, expected in expected_matches.items():
self.source.filter_text = params
expected_match_result = expected
self.assertEqual(self.source.is_regex_match(self.media.title), expected_match_result)
class TasksTestCase(TestCase):
def setUp(self):
# Disable general logging for test case
logging.disable(logging.CRITICAL)
def test_delete_old_media(self):
src1 = Source.objects.create(key='aaa', name='aaa', directory='/tmp/a', delete_old_media=False, days_to_keep=14)
src2 = Source.objects.create(key='bbb', name='bbb', directory='/tmp/b', delete_old_media=True, days_to_keep=14)
now = timezone.now()
m11 = Media.objects.create(source=src1, downloaded=True, key='a11', download_date=now - timedelta(days=5))
m12 = Media.objects.create(source=src1, downloaded=True, key='a12', download_date=now - timedelta(days=25))
m13 = Media.objects.create(source=src1, downloaded=False, key='a13')
m21 = Media.objects.create(source=src2, downloaded=True, key='a21', download_date=now - timedelta(days=5))
m22 = Media.objects.create(source=src2, downloaded=True, key='a22', download_date=now - timedelta(days=25))
m23 = Media.objects.create(source=src2, downloaded=False, key='a23')
self.assertEquals(src1.media_source.all().count(), 3)
self.assertEquals(src2.media_source.all().count(), 3)
cleanup_old_media()
self.assertEquals(src1.media_source.all().count(), 3)
self.assertEquals(src2.media_source.all().count(), 2)
self.assertEquals(Media.objects.filter(pk=m22.pk).exists(), False)
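The test_is_regex_match table above exercises Source.is_regex_match with the filter_text field. A standalone sketch of the presumed behaviour, written as a plain function since the real model method (in sync/models.py) is not part of this diff:

import re

def is_regex_match(filter_text, media_item_title):
    # A blank filter never matches, mirroring the comment in download_media_metadata above
    if not filter_text:
        return False
    return bool(re.search(filter_text, media_item_title))

# Matches one row of the expected_matches table
assert is_regex_match('(?i)No fancy stuff', 'no fancy stuff title') is True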

View File

@@ -2,7 +2,7 @@ from django.urls import path
from .views import (DashboardView, SourcesView, ValidateSourceView, AddSourceView,
SourceView, UpdateSourceView, DeleteSourceView, MediaView,
MediaThumbView, MediaItemView, MediaRedownloadView, MediaSkipView,
MediaEnableView, TasksView, CompletedTasksView, ResetTasks,
MediaEnableView, MediaContent, TasksView, CompletedTasksView, ResetTasks,
MediaServersView, AddMediaServerView, MediaServerView,
DeleteMediaServerView, UpdateMediaServerView)
@@ -28,6 +28,10 @@ urlpatterns = [
ValidateSourceView.as_view(),
name='validate-source'),
path('source-sync-now/<uuid:pk>',
SourcesView.as_view(),
name='source-sync-now'),
path('source-add',
AddSourceView.as_view(),
name='add-source'),
@@ -70,6 +74,10 @@ urlpatterns = [
MediaEnableView.as_view(),
name='enable-media'),
path('media-content/<uuid:pk>',
MediaContent.as_view(),
name='media-content'),
# Task URLs
path('tasks',

View File

@@ -14,9 +14,9 @@ def validate_url(url, validator):
Validate a URL against a dict of validation requirements. Returns an extracted
part of the URL if the URL is valid, if invalid raises a ValidationError.
'''
valid_scheme, valid_netloc, valid_path, invalid_paths, valid_query, \
valid_scheme, valid_netlocs, valid_path, invalid_paths, valid_query, \
extract_parts = (
validator['scheme'], validator['domain'], validator['path_regex'],
validator['scheme'], validator['domains'], validator['path_regex'],
validator['path_must_not_match'], validator['qs_args'],
validator['extract_key']
)
@@ -25,8 +25,8 @@ def validate_url(url, validator):
if url_scheme != valid_scheme:
raise ValidationError(f'invalid scheme "{url_scheme}" must be "{valid_scheme}"')
url_netloc = str(url_parts.netloc).strip().lower()
if url_netloc != valid_netloc:
raise ValidationError(f'invalid domain "{url_netloc}" must be "{valid_netloc}"')
if url_netloc not in valid_netlocs:
raise ValidationError(f'invalid domain "{url_netloc}" must be one of "{valid_netlocs}"')
url_path = str(url_parts.path).strip()
matches = re.findall(valid_path, url_path)
if not matches:
@@ -78,7 +78,7 @@ def resize_image_to_height(image, width, height):
if scaled_width < width:
# Width too small, stretch it
scaled_width = width
image = image.resize((scaled_width, height), Image.ANTIALIAS)
image = image.resize((scaled_width, height), Image.LANCZOS)
if scaled_width > width:
# Width too large, crop it
delta = scaled_width - width
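The validate_url rework earlier in this file now checks the URL netloc against a tuple of allowed domains instead of a single string. A brief usage sketch with a hypothetical validator dict; the real validator dicts are defined in views.py later in this diff, and the shape of extract_key is assumed:

from django.forms import ValidationError

validator = {
    'scheme': 'https',
    'domains': ('m.youtube.com', 'www.youtube.com'),
    'path_regex': '^\/channel\/([^\/]+)(\/videos)?$',
    'path_must_not_match': ('/playlist',),
    'qs_args': [],
    'extract_key': ('path_regex', 0),  # assumed shape of this field
}
validate_url('https://m.youtube.com/channel/channelid', validator)   # accepted
try:
    validate_url('https://n.youtube.com/channel/channelid', validator)
except ValidationError:
    pass  # 'n.youtube.com' is not in the domains tuple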

View File

@@ -1,18 +1,24 @@
import glob
import os
import json
from base64 import b64decode
import pathlib
import shutil
import sys
from django.conf import settings
from django.http import Http404
from django.http import FileResponse, Http404, HttpResponseNotFound, HttpResponseRedirect
from django.views.generic import TemplateView, ListView, DetailView
from django.views.generic.edit import (FormView, FormMixin, CreateView, UpdateView,
DeleteView)
from django.views.generic.detail import SingleObjectMixin
from django.core.exceptions import SuspiciousFileOperation
from django.http import HttpResponse
from django.urls import reverse_lazy
from django.db import IntegrityError
from django.db.models import Q, Count, Sum, When, Case
from django.forms import ValidationError
from django.forms import Form, ValidationError
from django.utils.text import slugify
from django.utils._os import safe_join
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from common.utils import append_uri_params
@@ -55,7 +61,7 @@ class DashboardView(TemplateView):
# Disk usage
disk_usage = Media.objects.filter(
downloaded=True, downloaded_filesize__isnull=False
).aggregate(Sum('downloaded_filesize'))
).defer('metadata').aggregate(Sum('downloaded_filesize'))
data['disk_usage_bytes'] = disk_usage['downloaded_filesize__sum']
if not data['disk_usage_bytes']:
data['disk_usage_bytes'] = 0
@@ -66,12 +72,12 @@ class DashboardView(TemplateView):
data['average_bytes_per_media'] = 0
# Latest downloads
data['latest_downloads'] = Media.objects.filter(
downloaded=True
).order_by('-download_date')[:10]
downloaded=True, downloaded_filesize__isnull=False
).defer('metadata').order_by('-download_date')[:10]
# Largest downloads
data['largest_downloads'] = Media.objects.filter(
downloaded=True, downloaded_filesize__isnull=False
).order_by('-downloaded_filesize')[:10]
).defer('metadata').order_by('-downloaded_filesize')[:10]
# UID and GID
data['uid'] = os.getuid()
data['gid'] = os.getgid()
@@ -92,8 +98,27 @@ class SourcesView(ListView):
paginate_by = settings.SOURCES_PER_PAGE
messages = {
'source-deleted': _('Your selected source has been deleted.'),
'source-refreshed': _('The source has been scheduled to be synced now.')
}
def get(self, *args, **kwargs):
if args[0].path.startswith("/source-sync-now/"):
sobj = Source.objects.get(pk=kwargs["pk"])
if sobj is None:
return HttpResponseNotFound()
verbose_name = _('Index media from source "{}" once')
index_source_task(
str(sobj.pk),
queue=str(sobj.pk),
repeat=0,
verbose_name=verbose_name.format(sobj.name))
url = reverse_lazy('sync:sources')
url = append_uri_params(url, {'message': 'source-refreshed'})
return HttpResponseRedirect(url)
else:
return super().get(self, *args, **kwargs)
def __init__(self, *args, **kwargs):
self.message = None
super().__init__(*args, **kwargs)
@@ -171,7 +196,7 @@ class ValidateSourceView(FormView):
validation_urls = {
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: {
'scheme': 'https',
'domain': 'www.youtube.com',
'domains': ('m.youtube.com', 'www.youtube.com'),
'path_regex': '^\/(c\/)?([^\/]+)(\/videos)?$',
'path_must_not_match': ('/playlist', '/c/playlist'),
'qs_args': [],
@@ -180,7 +205,7 @@ class ValidateSourceView(FormView):
},
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: {
'scheme': 'https',
'domain': 'www.youtube.com',
'domains': ('m.youtube.com', 'www.youtube.com'),
'path_regex': '^\/channel\/([^\/]+)(\/videos)?$',
'path_must_not_match': ('/playlist', '/c/playlist'),
'qs_args': [],
@@ -189,7 +214,7 @@ class ValidateSourceView(FormView):
},
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: {
'scheme': 'https',
'domain': 'www.youtube.com',
'domains': ('m.youtube.com', 'www.youtube.com'),
'path_regex': '^\/(playlist|watch)$',
'path_must_not_match': (),
'qs_args': ('list',),
@@ -269,23 +294,58 @@ class ValidateSourceView(FormView):
return append_uri_params(url, fields)
class AddSourceView(CreateView):
class EditSourceMixin:
model = Source
fields = ('source_type', 'key', 'name', 'directory', 'filter_text', 'media_format',
'index_schedule', 'download_media', 'download_cap', 'delete_old_media',
'delete_removed_media', 'delete_files_on_disk', 'days_to_keep', 'source_resolution',
'source_vcodec', 'source_acodec', 'prefer_60fps', 'prefer_hdr', 'fallback',
'copy_thumbnails', 'write_nfo', 'write_json', 'embed_metadata', 'embed_thumbnail',
'enable_sponsorblock', 'sponsorblock_categories', 'write_subtitles',
'auto_subtitles', 'sub_langs')
errors = {
'invalid_media_format': _('Invalid media format, the media format contains '
'errors or is empty. Check the table at the end of '
'this page for valid media name variables'),
'dir_outside_dlroot': _('You cannot specify a directory outside of the '
'base directory (%BASEDIR%)')
}
def form_valid(self, form: Form):
# Perform extra validation to make sure the media_format is valid
obj = form.save(commit=False)
source_type = form.cleaned_data['media_format']
example_media_file = obj.get_example_media_format()
if example_media_file == '':
form.add_error(
'media_format',
ValidationError(self.errors['invalid_media_format'])
)
# Check for suspicious file path(s)
try:
targetCheck = form.cleaned_data['directory']+"/.virt"
newdir = safe_join(settings.DOWNLOAD_ROOT,targetCheck)
except SuspiciousFileOperation:
form.add_error(
'directory',
ValidationError(self.errors['dir_outside_dlroot'].replace("%BASEDIR%",str(settings.DOWNLOAD_ROOT)))
)
if form.errors:
return super().form_invalid(form)
return super().form_valid(form)
class AddSourceView(EditSourceMixin, CreateView):
'''
Adds a new source, optionally takes some initial data querystring values to
prepopulate some of the more unclear values.
'''
template_name = 'sync/source-add.html'
model = Source
fields = ('source_type', 'key', 'name', 'directory', 'media_format',
'index_schedule', 'download_media', 'download_cap', 'delete_old_media',
'days_to_keep', 'source_resolution', 'source_vcodec', 'source_acodec',
'prefer_60fps', 'prefer_hdr', 'fallback', 'copy_thumbnails', 'write_nfo', 'write_json')
errors = {
'invalid_media_format': _('Invalid media format, the media format contains '
'errors or is empty. Check the table at the end of '
'this page for valid media name variables'),
}
def __init__(self, *args, **kwargs):
self.prepopulated_data = {}
@@ -312,20 +372,6 @@ class AddSourceView(CreateView):
initial[k] = v
return initial
def form_valid(self, form):
# Perform extra validation to make sure the media_format is valid
obj = form.save(commit=False)
source_type = form.cleaned_data['media_format']
example_media_file = obj.get_example_media_format()
if example_media_file == '':
form.add_error(
'media_format',
ValidationError(self.errors['invalid_media_format'])
)
if form.errors:
return super().form_invalid(form)
return super().form_valid(form)
def get_success_url(self):
url = reverse_lazy('sync:source', kwargs={'pk': self.object.pk})
return append_uri_params(url, {'message': 'source-created'})
@@ -360,37 +406,13 @@ class SourceView(DetailView):
error_message = get_error_message(error)
setattr(error, 'error_message', error_message)
data['errors'].append(error)
data['media'] = Media.objects.filter(source=self.object).order_by('-published')
data['media'] = Media.objects.filter(source=self.object).order_by('-published').defer('metadata')
return data
class UpdateSourceView(UpdateView):
class UpdateSourceView(EditSourceMixin, UpdateView):
template_name = 'sync/source-update.html'
model = Source
fields = ('source_type', 'key', 'name', 'directory', 'media_format',
'index_schedule', 'download_media', 'download_cap', 'delete_old_media',
'days_to_keep', 'source_resolution', 'source_vcodec', 'source_acodec',
'prefer_60fps', 'prefer_hdr', 'fallback', 'copy_thumbnails', 'write_nfo', 'write_json')
errors = {
'invalid_media_format': _('Invalid media format, the media format contains '
'errors or is empty. Check the table at the end of '
'this page for valid media name variables'),
}
def form_valid(self, form):
# Perform extra validation to make sure the media_format is valid
obj = form.save(commit=False)
source_type = form.cleaned_data['media_format']
example_media_file = obj.get_example_media_format()
if example_media_file == '':
form.add_error(
'media_format',
ValidationError(self.errors['invalid_media_format'])
)
if form.errors:
return super().form_invalid(form)
return super().form_valid(form)
def get_success_url(self):
url = reverse_lazy('sync:source', kwargs={'pk': self.object.pk})
@@ -415,14 +437,13 @@ class DeleteSourceView(DeleteView, FormMixin):
source = self.get_object()
for media in Media.objects.filter(source=source):
if media.media_file:
# Delete the media file
delete_file(media.media_file.name)
# Delete thumbnail copy if it exists
delete_file(media.thumbpath)
# Delete NFO file if it exists
delete_file(media.nfopath)
# Delete JSON file if it exists
delete_file(media.jsonpath)
file_path = media.media_file.path
matching_files = glob.glob(os.path.splitext(file_path)[0] + '.*')
for file in matching_files:
delete_file(file)
directory_path = source.directory_path
if os.path.exists(directory_path):
shutil.rmtree(directory_path, True)
return super().post(request, *args, **kwargs)
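The delete rework above removes every file that shares the media file's base name. A short illustration of the glob pattern, using an illustrative path:

import glob
import os

file_path = '/downloads/video/Some Channel/example.mkv'   # illustrative only
pattern = os.path.splitext(file_path)[0] + '.*'
# pattern == '/downloads/video/Some Channel/example.*', which matches
# example.mkv, example.nfo, example.info.json, example.jpg, and so on
matching_files = glob.glob(pattern)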
def get_success_url(self):
@@ -469,16 +490,16 @@ class MediaView(ListView):
if self.show_skipped:
q = Media.objects.filter(source=self.filter_source)
elif self.only_skipped:
q = Media.objects.filter(source=self.filter_source, skip=True)
q = Media.objects.filter(Q(source=self.filter_source) & (Q(skip=True) | Q(manual_skip=True)))
else:
q = Media.objects.filter(source=self.filter_source, skip=False)
q = Media.objects.filter(Q(source=self.filter_source) & (Q(skip=False) & Q(manual_skip=False)))
else:
if self.show_skipped:
q = Media.objects.all()
elif self.only_skipped:
q = Media.objects.filter(skip=True)
q = Media.objects.filter(Q(skip=True)|Q(manual_skip=True))
else:
q = Media.objects.filter(skip=False)
q = Media.objects.filter(Q(skip=False)&Q(manual_skip=False))
return q.order_by('-published', '-created')
def get_context_data(self, *args, **kwargs):
@@ -633,12 +654,13 @@ class MediaSkipView(FormView, SingleObjectMixin):
delete_task_by_media('sync.tasks.download_media', (str(self.object.pk),))
# If the media file exists on disk, delete it
if self.object.media_file_exists:
delete_file(self.object.media_file.path)
self.object.media_file = None
# If the media has an associated thumbnail copied, also delete it
delete_file(self.object.thumbpath)
# If the media has an associated NFO file with it, also delete it
delete_file(self.object.nfopath)
# Delete all files which contains filename
filepath = self.object.media_file.path
barefilepath, fileext = os.path.splitext(filepath)
# Get all files that start with the bare file path
all_related_files = glob.glob(f'{barefilepath}.*')
for file in all_related_files:
delete_file(file)
# Reset all download data
self.object.metadata = None
self.object.downloaded = False
@@ -650,6 +672,7 @@ class MediaSkipView(FormView, SingleObjectMixin):
self.object.downloaded_filesize = None
# Mark it to be skipped
self.object.skip = True
self.object.manual_skip = True
self.object.save()
return super().form_valid(form)
@@ -678,6 +701,7 @@ class MediaEnableView(FormView, SingleObjectMixin):
def form_valid(self, form):
# Mark it as not skipped
self.object.skip = False
self.object.manual_skip = False
self.object.save()
return super().form_valid(form)
@@ -686,6 +710,52 @@ class MediaEnableView(FormView, SingleObjectMixin):
return append_uri_params(url, {'message': 'enabled'})
class MediaContent(DetailView):
'''
Redirect to nginx to download the file
'''
model = Media
def __init__(self, *args, **kwargs):
self.object = None
super().__init__(*args, **kwargs)
def dispatch(self, request, *args, **kwargs):
self.object = self.get_object()
# development direct file stream - DO NOT USE IN PRODUCTION
if settings.DEBUG and 'runserver' in sys.argv:
# get media URL
pth = self.object.media_file.url
# remove "/media-data/"
pth = pth.split("/media-data/",1)[1]
# remove "/" (incase of absolute path)
pth = pth.split(str(settings.DOWNLOAD_ROOT).lstrip("/"),1)
# if we do not have a "/" at the beginning, it is not an absolute path...
if len(pth) > 1:
pth = pth[1]
else:
pth = pth[0]
# build final path
filepth = pathlib.Path(str(settings.DOWNLOAD_ROOT) + pth)
if filepth.exists():
# return file
response = FileResponse(open(filepth,'rb'))
return response
else:
return HttpResponseNotFound()
else:
headers = {
'Content-Type': self.object.content_type,
'X-Accel-Redirect': self.object.media_file.url,
}
return HttpResponse(headers=headers)
class TasksView(ListView):
'''
A list of tasks queued to be completed. This is, for example, scraping for new

View File

@@ -1,5 +1,5 @@
'''
Wrapper for the youtube-dl library. Used so if there are any library interface
Wrapper for the yt-dlp library. Used so if there are any library interface
updates we only need to update them in one place.
'''
@@ -64,13 +64,20 @@ def get_media_info(url):
return response
def download_media(url, media_format, extension, output_file, info_json):
def download_media(url, media_format, extension, output_file, info_json,
sponsor_categories=None,
embed_thumbnail=False, embed_metadata=False, skip_sponsors=True,
write_subtitles=False, auto_subtitles=False, sub_langs='en'):
'''
Downloads a YouTube URL to a file on disk.
'''
def hook(event):
filename = os.path.basename(event['filename'])
if event.get('downloaded_bytes') is None or event.get('total_bytes') is None:
return None
if event['status'] == 'error':
log.error(f'[youtube-dl] error occurred downloading: {filename}')
elif event['status'] == 'downloading':
@@ -99,17 +106,39 @@ def download_media(url, media_format, extension, output_file, info_json):
f'{total_size_str} in {elapsed_str}')
else:
log.warn(f'[youtube-dl] unknown event: {str(event)}')
hook.download_progress = 0
opts = get_yt_opts()
opts.update({
hook.download_progress = 0
ytopts = {
'format': media_format,
'merge_output_format': extension,
'outtmpl': output_file,
'quiet': True,
'progress_hooks': [hook],
'writeinfojson': info_json
})
'writeinfojson': info_json,
'postprocessors': [],
'writesubtitles': write_subtitles,
'writeautomaticsub': auto_subtitles,
'subtitleslangs': sub_langs.split(','),
}
if not sponsor_categories:
sponsor_categories = []
sbopt = {
'key': 'SponsorBlock',
'categories': sponsor_categories
}
ffmdopt = {
'key': 'FFmpegMetadata',
'add_chapters': embed_metadata,
'add_metadata': embed_metadata
}
opts = get_yt_opts()
if embed_thumbnail:
ytopts['postprocessors'].append({'key': 'EmbedThumbnail'})
if skip_sponsors:
ytopts['postprocessors'].append(sbopt)
ytopts['postprocessors'].append(ffmdopt)
opts.update(ytopts)
with yt_dlp.YoutubeDL(opts) as y:
try:
return y.download([url])
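A hedged example of calling the extended download_media wrapper above; the URL, format string, output path and category list are illustrative only:

# Illustrative values, not taken from the project
download_media(
    'https://www.youtube.com/watch?v=testvideo',
    media_format='299+140',
    extension='mkv',
    output_file='/downloads/video/Some Channel/example.mkv',
    info_json=True,
    sponsor_categories=['sponsor', 'selfpromo'],
    embed_thumbnail=True,
    embed_metadata=True,
    skip_sponsors=True,
    write_subtitles=True,
    auto_subtitles=False,
    sub_langs='en,fr',
)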

View File

@@ -0,0 +1,19 @@
import os
from celery import Celery
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tubesync.settings')
REDIS_CONNECTION = os.getenv('REDIS_CONNECTION', 'redis://localhost:6379/0')
app = Celery('tubesync')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
app.conf.broker_url = REDIS_CONNECTION
app.conf.beat_schedule = {
'10-second-beat': {
'task': 'sync.tasks.housekeeping_task',
'schedule': 60.0,
'args': ()
},
}
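Note the entry is keyed '10-second-beat' but the schedule above runs every 60 seconds. The referenced sync.tasks.housekeeping_task is not part of this diff; a minimal hedged sketch of how such a task could be declared with Celery's shared_task decorator:

from celery import shared_task

@shared_task
def housekeeping_task():
    # Placeholder body for illustration; periodic maintenance work would go here
    pass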

View File

@@ -1,5 +1,6 @@
import os
from pathlib import Path
from urllib.parse import urljoin
from common.logger import log
from common.utils import parse_database_connection_string
@@ -8,6 +9,10 @@ BASE_DIR = Path(__file__).resolve().parent.parent
ROOT_DIR = Path('/')
CONFIG_BASE_DIR = ROOT_DIR / 'config'
DOWNLOADS_BASE_DIR = ROOT_DIR / 'downloads'
DJANGO_URL_PREFIX = os.getenv('DJANGO_URL_PREFIX', None)
STATIC_URL = str(os.getenv('DJANGO_STATIC_URL', '/static/'))
if DJANGO_URL_PREFIX and STATIC_URL:
STATIC_URL = urljoin(DJANGO_URL_PREFIX, STATIC_URL[1:])
# This is not ever meant to be a public web interface so this isn't too critical
@@ -17,10 +22,7 @@ SECRET_KEY = str(os.getenv('DJANGO_SECRET_KEY', 'tubesync-django-secret'))
ALLOWED_HOSTS_STR = str(os.getenv('TUBESYNC_HOSTS', '*'))
ALLOWED_HOSTS = ALLOWED_HOSTS_STR.split(',')
DEBUG = True if os.getenv('TUBESYNC_DEBUG', False) else False
FORCE_SCRIPT_NAME = os.getenv('DJANGO_FORCE_SCRIPT_NAME', None)
TIME_ZONE = os.getenv('TZ', 'UTC')
FORCE_SCRIPT_NAME = os.getenv('DJANGO_FORCE_SCRIPT_NAME', DJANGO_URL_PREFIX)
database_dict = {}
@@ -59,6 +61,13 @@ if BACKGROUND_TASK_ASYNC_THREADS > MAX_BACKGROUND_TASK_ASYNC_THREADS:
MEDIA_ROOT = CONFIG_BASE_DIR / 'media'
DOWNLOAD_ROOT = DOWNLOADS_BASE_DIR
YOUTUBE_DL_CACHEDIR = CONFIG_BASE_DIR / 'cache'
COOKIES_FILE = CONFIG_BASE_DIR / 'cookies.txt'
HEALTHCHECK_FIREWALL_STR = str(os.getenv('TUBESYNC_HEALTHCHECK_FIREWAL', 'True')).strip().lower()
HEALTHCHECK_FIREWALL = True if HEALTHCHECK_FIREWALL_STR == 'true' else False
HEALTHCHECK_ALLOWED_IPS_STR = str(os.getenv('TUBESYNC_HEALTHCHECK_ALLOWED_IPS', '127.0.0.1'))
HEALTHCHECK_ALLOWED_IPS = HEALTHCHECK_ALLOWED_IPS_STR.split(',')
BASICAUTH_USERNAME = os.getenv('HTTP_USER', '').strip()
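The DJANGO_URL_PREFIX handling near the top of this file joins the prefix onto STATIC_URL. A small worked example of the urljoin call, assuming a prefix of '/tubesync/':

from urllib.parse import urljoin

DJANGO_URL_PREFIX = '/tubesync/'   # illustrative value
STATIC_URL = '/static/'
STATIC_URL = urljoin(DJANGO_URL_PREFIX, STATIC_URL[1:])
# STATIC_URL is now '/tubesync/static/'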

View File

@@ -1,3 +1,4 @@
import os
from pathlib import Path
@@ -6,7 +7,7 @@ CONFIG_BASE_DIR = BASE_DIR
DOWNLOADS_BASE_DIR = BASE_DIR
VERSION = '0.11.0'
VERSION = '0.13.3'
SECRET_KEY = ''
DEBUG = False
ALLOWED_HOSTS = []
@@ -96,7 +97,7 @@ AUTH_PASSWORD_VALIDATORS = [
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
TIME_ZONE = os.getenv('TZ', 'UTC')
USE_I18N = True
USE_L10N = True
USE_TZ = True
@@ -138,7 +139,7 @@ BACKGROUND_TASK_ASYNC_THREADS = 1 # Number of async tasks to run at on
MAX_BACKGROUND_TASK_ASYNC_THREADS = 8 # For sanity reasons
BACKGROUND_TASK_PRIORITY_ORDERING = 'ASC' # Use 'niceness' task priority ordering
COMPLETED_TASKS_DAYS_TO_KEEP = 7 # Number of days to keep completed tasks
MAX_ENTRIES_PROCESSING = 0 # Number of videos to process on source refresh (0 for no limit)
SOURCES_PER_PAGE = 100
MEDIA_PER_PAGE = 144

View File

@@ -1,6 +1,25 @@
import os
from urllib.parse import urljoin
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tubesync.settings')
application = get_wsgi_application()
DJANGO_URL_PREFIX = os.getenv('DJANGO_URL_PREFIX', None)
_application = get_wsgi_application()
def application(environ, start_response):
script_name = None
if DJANGO_URL_PREFIX:
if DJANGO_URL_PREFIX.endswith('/'):
script_name = DJANGO_URL_PREFIX
else:
raise Exception(f'DJANGO_URL_PREFIX must end with a /, '
f'got: {DJANGO_URL_PREFIX}')
if script_name:
static_url = urljoin(script_name, 'static/')
environ['SCRIPT_NAME'] = script_name
path_info = environ['PATH_INFO']
if path_info.startswith(script_name) and not path_info.startswith(static_url):
environ['PATH_INFO'] = path_info[len(script_name) - 1:]
return _application(environ, start_response)
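A short worked example of the SCRIPT_NAME rewriting above, assuming DJANGO_URL_PREFIX is '/tubesync/' and an incoming request for '/tubesync/sources':

# Illustrative values only
script_name = '/tubesync/'
path_info = '/tubesync/sources'
# static paths ('/tubesync/static/...') are left alone; everything else has the prefix trimmed
path_info = path_info[len(script_name) - 1:]
# path_info is now '/sources', so Django resolves the URL without the prefix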