Compare commits

43 commits. Only the commit SHA1s were captured in this view (the author and date columns are not shown):

2c1c45e829, c64f54bcb4, 6ce55b0337, d06c4beae0, db651e16b9, 86068790ed, ea72671351,
96b9eddf43, bceefc8b01, 820cc69937, 1e8711be51, e3423bc2d2, 6fbf72d0e7, d6852bf828,
f6f4f244d7, df35aa2a5f, 799c0fce39, 2f324f28a9, 895bfe6f87, e0669b107d, 0dc201b293,
82fa0f6bce, 8b93cb4a59, 647254d7f7, 3567e20600, 5348e25303, 749df3f7bb, 2c2f53e5b2,
06cfafb803, f5a37f2e86, 36747a47e0, ffd69e8d40, eebef3371f, 4cd6701c8a, 4ebe6f2a37,
d553d58fde, df40a1367a, 607ee77e70, 9af493aa8a, f0c94ff789, 39c7799831, da7371f830,
387cfefc8f
.github/FUNDING.yml (new file, vendored, 1 line)

@@ -0,0 +1 @@
+github: [meeb]
Pipfile (1 line added)

@@ -17,6 +17,7 @@ httptools = "*"
 youtube-dl = "*"
 django-background-tasks = "*"
 requests = "*"
+django-basicauth = "*"
 
 [requires]
 python_version = "3"
Pipfile.lock (generated, 97 lines changed)

The lockfile hash in `_meta` was regenerated, from
`a4bb556fc61ee4583f9588980450b071814298ee4d1a1023fad149c14d14aaba` to
`f698e2853dec2d325d2d7e752620fc81d911022d394a57f2f8a9349ac2682752`, and the per-package
`sha256` hash lists were refreshed for the packages that changed. The dependency changes are:

| Package | Old version | New version |
|---|---|---|
| django | 3.1.4 | 3.1.7 |
| django-basicauth | (not present) | 0.5.3 |
| pillow | 8.0.1 | 8.1.1 |
| pytz | 2020.4 | 2021.1 |
| urllib3 | 1.26.2 | 1.26.3 |
| youtube-dl | 2020.12.14 | 2021.3.3 |
README.md (63 lines changed)

@@ -22,7 +22,7 @@ hopefully, quite reliable.
 # Latest container image
 
 ```yaml
-ghcr.io/meeb/tubesync:v0.7
+ghcr.io/meeb/tubesync:v0.9.1
 ```
 
 **NOTE: the `:latest` tag does exist, but will contain in-development commits and may

@@ -102,7 +102,7 @@ Finally, download and run the container:
 
 ```bash
 # Pull a versioned image
-$ docker pull ghcr.io/meeb/tubesync:v0.7
+$ docker pull ghcr.io/meeb/tubesync:v0.9.1
 # Start the container using your user ID and group ID
 $ docker run \
   -d \

@@ -113,7 +113,7 @@ $ docker run \
   -v /some/directory/tubesync-config:/config \
   -v /some/directory/tubesync-downloads:/downloads \
   -p 4848:4848 \
-  ghcr.io/meeb/tubesync:v0.7
+  ghcr.io/meeb/tubesync:v0.9.1
 ```
 
 Once running, open `http://localhost:4848` in your browser and you should see the

@@ -125,7 +125,7 @@ Alternatively, for Docker Compose, you can use something like:
 
 ```yaml
 tubesync:
-  image: ghcr.io/meeb/tubesync:v0.7
+  image: ghcr.io/meeb/tubesync:v0.9.1
   container_name: tubesync
   restart: unless-stopped
   ports:

@@ -139,6 +139,41 @@ Alternatively, for Docker Compose, you can use something like:
     - PGID=1000
 ```
 
+
+## Optional authentication
+
+Available in `v1.0` (or `:latest`) and later. If you want to enable a basic username and
+password to be required to access the TubeSync dashboard you can set them with the
+following environment variables:
+
+```bash
+HTTP_USER
+HTTP_PASS
+```
+
+For example in the `docker run ...` line add in:
+
+```bash
+...
+-e HTTP_USER=some-username \
+-e HTTP_PASS=some-secure-password \
+...
+```
+
+Or in your Docker Compose file you would add in:
+
+```yaml
+...
+environment:
+  - HTTP_USER=some-username
+  - HTTP_PASS=some-secure-password
+...
+```
+
+When BOTH `HTTP_USER` and `HTTP_PASS` are set then basic HTTP authentication will be
+enabled.
+
+
 # Updating
 
 To update, you can just pull a new version of the container image as they are released.

@@ -196,6 +231,18 @@ $ docker logs --follow tubesync
 ```
 
+
+# Advanced usage guides
+
+Once you're happy using TubeSync there are some advanced usage guides for more complex
+and less common features:
+
+[three links here to the new guides under docs/ — the link text was not captured in this view]
+
 
 # Warnings
 
 ### 1. Index frequency

@@ -290,7 +337,9 @@ can log in at http://localhost:4848/admin
 
 ### Are there user accounts or multi-user support?
 
-No not at the moment. This could be added later if there is demand for it.
+There is support for basic HTTP authentication by setting the `HTTP_USER` and
+`HTTP_PASS` environment variables. There is not support for multi-user or user
+management.
 
 ### Does TubeSync support HTTPS?
 

@@ -318,6 +367,8 @@ useful if you are manually installing TubeSync in some other environment. These
 | GUNICORN_WORKERS | Number of gunicorn workers to spawn | 3 |
 | LISTEN_HOST | IP address for gunicorn to listen on | 127.0.0.1 |
 | LISTEN_PORT | Port number for gunicorn to listen on | 8080 |
+| HTTP_USER | Sets the username for HTTP basic authentication | some-username |
+| HTTP_PASS | Sets the password for HTTP basic authentication | some-secure-password |
 
 
 # Manual, non-containerised, installation

@@ -334,7 +385,7 @@ installing and running WSGI-based Python web applications before attempting this
    `tubesync/tubesync/local_settings.py` and edit it as appropriate
 5. Run migrations with `./manage.py migrate`
 6. Collect static files with `./manage.py collectstatic`
-6. Set up your prefered WSGI server, such as `gunicorn` poiting it to the application
+6. Set up your prefered WSGI server, such as `gunicorn` pointing it to the application
    in `tubesync/tubesync/wsgi.py`
 7. Set up your proxy server such as `nginx` and forward it to the WSGI server
 8. Check the web interface is working
(file path not captured in this view)

@@ -19,8 +19,8 @@ chown -R app:app /app/common/static && \
 chmod -R 0750 /app/common/static && \
 chown -R app:app /app/static && \
 chmod -R 0750 /app/static && \
-find /app -type f -exec chmod 640 {} \; && \
-chmod +x /app/healthcheck.py
+find /app -type f ! -iname healthcheck.py -exec chmod 640 {} \; && \
+chmod 0755 /app/healthcheck.py
 
 # Run migrations
 exec s6-setuidgid app \
docs/create-missing-metadata.md (new file, 37 lines)

# TubeSync

## Advanced usage guide - creating missing metadata

This is a new feature in v0.9 of TubeSync and later. It allows you to create or
re-create missing metadata in your TubeSync download directories for missing `nfo`
files and thumbnails.

If you add a source with "write NFO files" or "copy thumbnails" disabled, download
some media, and then update the source to write NFO files or copy thumbnails,
TubeSync will not retroactively copy or create the missing metadata files
automatically. You can use a special one-off command to manually write missing
metadata files to the correct locations.

## Requirements

You have added a source without metadata writing enabled, downloaded some media, then
updated the source to enable metadata writing.

## Steps

### 1. Run the batch metadata sync command

Execute the following Django command:

`./manage.py sync-missing-metadata`

When deploying TubeSync inside a container, you can execute this with:

`docker exec -ti tubesync python3 /app/manage.py sync-missing-metadata`

This command will log what it's doing to the terminal when you run it.

Internally, this command loops over all your sources which have been saved with
"write NFO files" or "copy thumbnails" enabled. It then loops over all media saved to
that source and confirms that the appropriate thumbnail files have been copied over and
the NFO file has been written, if enabled.
docs/import-existing-media.md (new file, 81 lines)

# TubeSync

## Advanced usage guide - importing existing media

This is a new feature in v0.9 of TubeSync and later. It allows you to mark existing
downloaded media as "downloaded" in TubeSync. You can use this feature if, for example,
you already have an extensive catalogue of downloaded media which you want to mark
as downloaded in TubeSync so TubeSync doesn't re-download media you already have.

## Requirements

Your existing downloaded media MUST contain the unique ID. For YouTube videos, this
means the YouTube video ID MUST be in the filename.

Supported extensions to be imported are .m4a, .ogg, .mkv, .mp3, .mp4 and .avi. The
media you want to import must end in one of these file extensions.

## Caveats

As TubeSync does not probe media, and your existing media may be re-encoded or in
different formats to what is available in the current media metadata, there is no way
for TubeSync to know what codecs, resolution, bitrate etc. your imported media is in.
Any manually imported existing local media will display blank boxes for this
information on the TubeSync interface as it's unavailable.

## Steps

### 1. Add your source to TubeSync

Add your source to TubeSync, such as a YouTube channel. **Make sure you untick the
"download media" checkbox.**

This will allow TubeSync to index all the available media on your source, but won't
start downloading any media.

### 2. Wait

Wait for all the media on your source to be indexed. This may take some time.

### 3. Move your existing media into TubeSync

You now need to move your existing media into TubeSync. You need to move the media
files into the correct download directories created by TubeSync. For example, if you
have downloaded videos for a YouTube channel "TestChannel", you would have added this
as a source called TestChannel in a directory called test-channel in TubeSync. It
would have a download directory created on disk at:

`/path/to/downloads/test-channel`

You would move all of the pre-existing videos you downloaded outside of TubeSync for
this channel into this directory.

In short, your existing media needs to be moved into the correct TubeSync source
directory to be detected.

This is required so TubeSync can know which Source to link the media to.

### 4. Run the batch import command

Execute the following Django command:

`./manage.py import-existing-media`

When deploying TubeSync inside a container, you can execute this with:

`docker exec -ti tubesync python3 /app/manage.py import-existing-media`

This command will log what it's doing to the terminal when you run it.

Internally, `import-existing-media` looks for the unique media key (for YouTube, this
is the YouTube video ID) in the filename and detects the source to link it to based
on the directory the media file is inside.

### 5. Re-enable downloading at the source

Edit your source and re-enable / tick the "download media" option. This will allow
TubeSync to download any missing media you did not manually import.

Note that TubeSync will still get screenshots, write `nfo` files, etc. for files you
manually import if enabled at the source level.
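To make the matching rule above concrete, here is a tiny illustrative sketch; the video ID
and file name are invented for the example, and it mirrors the behaviour described rather
than the command's exact code:

```python
# The importer only needs the unique media key (the YouTube video ID here) to appear
# somewhere in the file name, and the file to use one of the supported extensions.
key = 'dQw4w9WgXcQ'  # hypothetical YouTube video ID
filename = '20210101_test-channel_Some Video Title_dQw4w9WgXcQ_1080p-vp9.mkv'

supported = ('.m4a', '.ogg', '.mkv', '.mp3', '.mp4', '.avi')
would_be_imported = key in filename and filename.lower().endswith(supported)
print(would_be_imported)  # True
```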
docs/reset-tasks.md (new file, 33 lines)

# TubeSync

## Advanced usage guide - reset tasks from the command line

This is a new feature in v1.0 of TubeSync and later. It allows you to reset all
scheduled tasks from the command line, in addition to the "reset tasks" button in the
"tasks" tab of the dashboard.

This is useful for TubeSync installations where you may have a lot of media and
sources added and the "reset tasks" button may take so long that the page times out
(with a 502 error or similar issue).

## Requirements

You have added some sources and media.

## Steps

### 1. Run the reset tasks command

Execute the following Django command:

`./manage.py reset-tasks`

When deploying TubeSync inside a container, you can execute this with:

`docker exec -ti tubesync python3 /app/manage.py reset-tasks`

This command will log what it's doing to the terminal when you run it.

When this is run, new tasks will be immediately created so all your sources will be
indexed again straight away, any missing information such as thumbnails will be
redownloaded, etc.
Middleware module (file path not captured in this view)

@@ -1,4 +1,6 @@
+from django.conf import settings
 from django.forms import BaseForm
+from basicauth.middleware import BasicAuthMiddleware as BaseBasicAuthMiddleware
 
 
 class MaterializeDefaultFieldsMiddleware:

@@ -19,3 +21,12 @@ class MaterializeDefaultFieldsMiddleware:
             for _, field in v.fields.items():
                 field.widget.attrs.update({'class':'browser-default'})
         return response
+
+
+class BasicAuthMiddleware(BaseBasicAuthMiddleware):
+
+    def process_request(self, request):
+        bypass_uris = getattr(settings, 'BASICAUTH_ALWAYS_ALLOW_URIS', [])
+        if request.path in bypass_uris:
+            return None
+        return super().process_request(request)
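For context, a minimal sketch of how these pieces might be wired together in Django
settings. `BASICAUTH_USERS` and `BASICAUTH_DISABLE` are settings provided by the
`django-basicauth` package added in the Pipfile above, while `BASICAUTH_ALWAYS_ALLOW_URIS`
is the project-specific setting read by the subclass; the environment-variable handling
and the example path are illustrative assumptions, not the project's actual settings
module:

```python
# settings.py (illustrative sketch only)
import os

http_user = os.getenv('HTTP_USER', '')
http_pass = os.getenv('HTTP_PASS', '')

# Only turn basic auth on when both credentials are supplied
BASICAUTH_DISABLE = not (http_user and http_pass)
BASICAUTH_USERS = {http_user: http_pass} if http_user else {}

# Paths that should never require authentication (hypothetical example path)
BASICAUTH_ALWAYS_ALLOW_URIS = ['/healthcheck']
```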
Stylesheet (SCSS) changes (file paths not captured in this view)

@@ -4,7 +4,7 @@
 }
 .help-text {
   color: $form-help-text-colour;
-  padding: 1rem 0 1rem 0;
+  padding-bottom: 1rem;
 }
 label {
   text-transform: uppercase;

@@ -5,6 +5,13 @@ html {
   color: $text-colour;
 }
 
+body {
+  display: flex;
+  min-height: 100vh;
+  flex-direction: column;
+  justify-content: space-between;
+}
+
 header {
 
   background-color: $header-background-colour;

@@ -174,8 +181,10 @@ main {
     display: inline-block;
     font-weight: bold;
     text-decoration: none;
-    padding: 5px 10px 5px 10px;
-    margin: 0 3px 0 3px;
+    padding: 5px 8px 4px 8px;
+    margin: 0 3px 6px 3px;
+    min-width: 40px;
+    min-height: 40px;
     background-color: $pagination-background-colour;
     color: $pagination-text-colour;
     border: 2px $pagination-border-colour solid;
Base page template (file path not captured in this view). The change wraps the header,
nav and main blocks in a new `<div class="app">` container; their contents appear
unchanged apart from indentation in this view, so they are shown as context below:

@@ -16,32 +16,36 @@
 
 <body>
 
+<div class="app">
+
 <header>
 <div class="container">
 <a href="{% url 'sync:dashboard' %}">
 {% include 'tubesync.svg' with width='3rem' height='3rem' %}
 <h1>TubeSync</h1>
 </a>
 </div>
 </header>
 
 <nav>
 <div class="container">
 <ul>
 <li><a href="{% url 'sync:dashboard' %}"><i class="fas fa-fw fa-th-large"></i><span class="hide-on-med-and-down"> Dashboard</span></a></li>
 <li><a href="{% url 'sync:sources' %}"><i class="fas fa-fw fa-play"></i><span class="hide-on-med-and-down"> Sources</span></a></li>
 <li><a href="{% url 'sync:media' %}"><i class="fas fa-fw fa-film"></i><span class="hide-on-med-and-down"> Media</span></a></li>
 <li><a href="{% url 'sync:tasks' %}"><i class="far fa-fw fa-clock"></i><span class="hide-on-med-and-down"> Tasks</span></a></li>
 <li><a href="{% url 'sync:mediaservers' %}"><i class="fas fa-fw fa-stream"></i><span class="hide-on-med-and-down"> Media Servers</span></a></li>
 </ul>
 </div>
 </nav>
 
 <main>
 <div class="container">
 {% block content %}{% endblock %}
 </div>
 </main>
+
+</div>
 
 <footer>
 <div class="container">
Pagination template (file path not captured in this view)

@@ -3,7 +3,7 @@
 <div class="col s12">
 <div class="pagination">
 {% for i in paginator.page_range %}
-<a class="pagenum{% if i == page_obj.number %} currentpage{% endif %}" href="?{% if filter %}filter={{ filter }}&{% endif %}page={{ i }}">{{ i }}</a>
+<a class="pagenum{% if i == page_obj.number %} currentpage{% endif %}" href="?{% if filter %}filter={{ filter }}&{% endif %}page={{ i }}{% if show_skipped %}&show_skipped=yes{% endif %}">{{ i }}</a>
 {% endfor %}
 </div>
 </div>
Common utilities module (path inferred as `common/utils.py` from the
`from common.utils import clean_filename` import added in the models module below)

@@ -14,3 +14,13 @@ def append_uri_params(uri, params):
     uri = str(uri)
     qs = urlencode(params)
     return urlunsplit(('', '', uri, qs, ''))
+
+
+def clean_filename(filename):
+    if not isinstance(filename, str):
+        raise ValueError(f'filename must be a str, got {type(filename)}')
+    to_scrub = '<>\/:*?"|%'
+    for char in to_scrub:
+        filename = filename.replace(char, '')
+    filename = ''.join([c for c in filename if ord(c) > 30])
+    return ' '.join(filename.split())
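A quick usage illustration of the new helper; the import path matches the
`from common.utils import clean_filename` line added to the models module further below,
and the input string is made up for the example:

```python
from common.utils import clean_filename

# Characters in '<>\/:*?"|%' and control characters are stripped, then runs of
# whitespace are collapsed to single spaces.
print(clean_filename('My: Video* Title?  (HD)|2021'))
# -> 'My Video Title (HD)2021'
```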
tubesync/healthcheck.py — file mode changed from normal to executable (no content changes)

tubesync/spam — 1,183 lines changed; the diff is suppressed in this view because it is too large
Admin module (file path not captured in this view)

@@ -7,7 +7,7 @@ class SourceAdmin(admin.ModelAdmin):
 
     ordering = ('-created',)
     list_display = ('uuid', 'name', 'source_type', 'last_crawl',
-                    'has_failed')
+                    'download_media', 'has_failed')
     readonly_fields = ('uuid', 'created')
     search_fields = ('uuid', 'key', 'name')
 
tubesync/sync/management/commands/import-existing-media.py (new file, 55 lines)

import os
from pathlib import Path
from django.core.management.base import BaseCommand, CommandError
from common.logger import log
from sync.models import Source, Media


class Command(BaseCommand):

    help = ('Scans download media directories for media not yet downloaded and ',
            'marks them as downloaded')
    extra_extensions = ['mp3', 'mp4', 'avi']

    def handle(self, *args, **options):
        log.info('Building directory to Source map...')
        dirmap = {}
        for s in Source.objects.all():
            dirmap[s.directory_path] = s
        log.info(f'Scanning sources...')
        file_extensions = list(Source.EXTENSIONS) + self.extra_extensions
        for sourceroot, source in dirmap.items():
            media = list(Media.objects.filter(source=source, downloaded=False,
                                              skip=False))
            if not media:
                log.info(f'Source "{source}" has no missing media')
                continue
            log.info(f'Scanning Source "{source}" directory for media to '
                     f'import: {sourceroot}, looking for {len(media)} '
                     f'undownloaded and unskipped items')
            on_disk = []
            for (root, dirs, files) in os.walk(sourceroot):
                rootpath = Path(root)
                for filename in files:
                    filepart, ext = os.path.splitext(filename)
                    if ext.startswith('.'):
                        ext = ext[1:]
                    ext = ext.strip().lower()
                    if ext not in file_extensions:
                        continue
                    on_disk.append(str(rootpath / filename))
            filemap = {}
            for item in media:
                for filepath in on_disk:
                    if item.key in filepath:
                        # The unique item key is in the file name on disk, map it to
                        # the undownloaded media item
                        filemap[filepath] = item
                        continue
            for filepath, item in filemap.items():
                log.info(f'Matched on-disk file: {filepath} '
                         f'to media item: {item.source} / {item}')
                item.media_file.name = filepath
                item.downloaded = True
                item.save()
        log.info('Done')
tubesync/sync/management/commands/reset-tasks.py (new file, 32 lines)

from django.core.management.base import BaseCommand, CommandError
from django.utils.translation import gettext_lazy as _
from background_task.models import Task
from sync.models import Source
from sync.tasks import index_source_task


from common.logger import log


class Command(BaseCommand):

    help = 'Resets all tasks'

    def handle(self, *args, **options):
        log.info('Resettings all tasks...')
        # Delete all tasks
        Task.objects.all().delete()
        # Iter all tasks
        for source in Source.objects.all():
            # Recreate the initial indexing task
            verbose_name = _('Index media from source "{}"')
            index_source_task(
                str(source.pk),
                repeat=source.index_schedule,
                queue=str(source.pk),
                priority=5,
                verbose_name=verbose_name.format(source.name)
            )
            # This also chains down to call each Media objects .save() as well
            source.save()
        log.info('Done')
tubesync/sync/management/commands/sync-missing-metadata.py (new file, 34 lines)

import os
from shutil import copyfile
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
from common.logger import log
from sync.models import Source, Media
from sync.utils import write_text_file


class Command(BaseCommand):

    help = 'Syncs missing metadata (such as nfo files) if source settings are updated'

    def handle(self, *args, **options):
        log.info('Syncing missing metadata...')
        sources = Source.objects.filter(Q(copy_thumbnails=True) | Q(write_nfo=True))
        for source in sources.order_by('name'):
            log.info(f'Finding media for source: {source}')
            for item in Media.objects.filter(source=source, downloaded=True):
                log.info(f'Checking media for missing metadata: {source} / {item}')
                thumbpath = item.thumbpath
                if not thumbpath.is_file():
                    if item.thumb:
                        log.info(f'Copying missing thumbnail from: {item.thumb.path} '
                                 f'to: {thumbpath}')
                        copyfile(item.thumb.path, thumbpath)
                    else:
                        log.error(f'Tried to copy missing thumbnail for {item} but '
                                  f'the thumbnail has not been downloaded')
                nfopath = item.nfopath
                if not nfopath.is_file():
                    log.info(f'Writing missing NFO file: {nfopath}')
                    write_text_file(nfopath, item.nfoxml)
        log.info('Done')
Format-matching module (file path not captured in this view)

@@ -66,7 +66,7 @@ def get_best_audio_format(media):
     # No codecs matched
     if media.source.can_fallback:
         # Can fallback, find the next highest bitrate non-matching codec
-        return False, audio_formats[0]
+        return False, audio_formats[0]['id']
     else:
         # Can't fallback
         return False, False
tubesync/sync/migrations/0009_auto_20210218_0442.py (new file, 30 lines)

# Generated by Django 3.1.6 on 2021-02-18 04:42

import django.core.files.storage
from django.db import migrations, models
import sync.models


class Migration(migrations.Migration):

    dependencies = [
        ('sync', '0008_source_download_cap'),
    ]

    operations = [
        migrations.AddField(
            model_name='source',
            name='download_media',
            field=models.BooleanField(default=True, help_text='Download media from this source, if not selected the source will only be indexed', verbose_name='download media'),
        ),
        migrations.AlterField(
            model_name='media',
            name='media_file',
            field=models.FileField(blank=True, help_text='Media file', max_length=200, null=True, storage=django.core.files.storage.FileSystemStorage(location='/home/meeb/Repos/github.com/meeb/tubesync/tubesync/downloads'), upload_to=sync.models.get_media_file_path, verbose_name='media file'),
        ),
        migrations.AlterField(
            model_name='source',
            name='media_format',
            field=models.CharField(default='{yyyymmdd}_{source}_{title}_{key}_{format}.{ext}', help_text='File format to use for saving files, detailed options at bottom of page.', max_length=200, verbose_name='media format'),
        ),
    ]
Models module (file path not captured in this view; the hunks modify the `Source` and `Media` models)

@@ -12,6 +12,7 @@ from django.utils.text import slugify
 from django.utils import timezone
 from django.utils.translation import gettext_lazy as _
 from common.errors import NoFormatException
+from common.utils import clean_filename
 from .youtube import (get_media_info as get_youtube_media_info,
                       download_media as download_youtube_media)
 from .utils import seconds_to_timestr, parse_media_format

@@ -100,6 +101,11 @@ class Source(models.Model):
         (FALLBACK_NEXT_BEST_HD, _('Get next best resolution but at least HD'))
     )
 
+    EXTENSION_M4A = 'm4a'
+    EXTENSION_OGG = 'ogg'
+    EXTENSION_MKV = 'mkv'
+    EXTENSIONS = (EXTENSION_M4A, EXTENSION_OGG, EXTENSION_MKV)
+
     # Fontawesome icons used for the source on the front end
     ICONS = {
         SOURCE_TYPE_YOUTUBE_CHANNEL: '<i class="fab fa-youtube"></i>',

@@ -112,6 +118,12 @@ class Source(models.Model):
         SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'https://www.youtube.com/channel/{key}',
         SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/playlist?list={key}',
     }
+    # Format used to create indexable URLs
+    INDEX_URLS = {
+        SOURCE_TYPE_YOUTUBE_CHANNEL: 'https://www.youtube.com/c/{key}/videos',
+        SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'https://www.youtube.com/channel/{key}/videos',
+        SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/playlist?list={key}',
+    }
     # Callback functions to get a list of media from the source
     INDEXERS = {
         SOURCE_TYPE_YOUTUBE_CHANNEL: get_youtube_media_info,

@@ -200,7 +212,7 @@ class Source(models.Model):
         _('media format'),
         max_length=200,
         default=settings.MEDIA_FORMATSTR_DEFAULT,
-        help_text=_('File format to use for saving files')
+        help_text=_('File format to use for saving files, detailed options at bottom of page.')
     )
     index_schedule = models.IntegerField(
         _('index schedule'),

@@ -209,6 +221,11 @@ class Source(models.Model):
         default=IndexSchedule.EVERY_6_HOURS,
         help_text=_('Schedule of how often to index the source for new media')
     )
+    download_media = models.BooleanField(
+        _('download media'),
+        default=True,
+        help_text=_('Download media from this source, if not selected the source will only be indexed')
+    )
     download_cap = models.IntegerField(
         _('download cap'),
         choices=CapChoices.choices,

@@ -327,23 +344,32 @@ class Source(models.Model):
         '''
         if self.is_audio:
             if self.source_acodec == self.SOURCE_ACODEC_MP4A:
-                return 'm4a'
+                return self.EXTENSION_M4A
             elif self.source_acodec == self.SOURCE_ACODEC_OPUS:
-                return 'ogg'
+                return self.EXTENSION_OGG
             else:
                 raise ValueError('Unable to choose audio extension, uknown acodec')
         else:
-            return 'mkv'
+            return self.EXTENSION_MKV
 
     @classmethod
     def create_url(obj, source_type, key):
         url = obj.URLS.get(source_type)
         return url.format(key=key)
 
+    @classmethod
+    def create_index_url(obj, source_type, key):
+        url = obj.INDEX_URLS.get(source_type)
+        return url.format(key=key)
+
     @property
     def url(self):
         return Source.create_url(self.source_type, self.key)
 
+    @property
+    def index_url(self):
+        return Source.create_index_url(self.source_type, self.key)
+
     @property
     def format_summary(self):
         if self.source_resolution == Source.SOURCE_RESOLUTION_AUDIO:

@@ -436,25 +462,10 @@ class Source(models.Model):
         indexer = self.INDEXERS.get(self.source_type, None)
         if not callable(indexer):
             raise Exception(f'Source type f"{self.source_type}" has no indexer')
-        response = indexer(self.url)
-        # Account for nested playlists, such as a channel of playlists of playlists
-        def _recurse_playlists(playlist):
-            videos = []
-            if not playlist:
-                return videos
-            entries = playlist.get('entries', [])
-            for entry in entries:
-                if not entry:
-                    continue
-                subentries = entry.get('entries', [])
-                if subentries:
-                    videos = videos + _recurse_playlists(entry)
-                else:
-                    videos.append(entry)
-            return videos
-        return _recurse_playlists(response)
+        response = indexer(self.index_url)
+        if not isinstance(response, dict):
+            return []
+        return response.get('entries', [])
 
 
 def get_media_thumb_path(instance, filename):

@@ -480,6 +491,12 @@ class Media(models.Model):
         Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'https://www.youtube.com/watch?v={key}',
         Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/watch?v={key}',
     }
+    # Callback functions to get a list of media from the source
+    INDEXERS = {
+        Source.SOURCE_TYPE_YOUTUBE_CHANNEL: get_youtube_media_info,
+        Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: get_youtube_media_info,
+        Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: get_youtube_media_info,
+    }
     # Maps standardised names to names used in source metdata
     METADATA_FIELDS = {
         'upload_date': {

@@ -557,14 +574,18 @@ class Media(models.Model):
     STATE_SCHEDULED = 'scheduled'
     STATE_DOWNLOADING = 'downloading'
     STATE_DOWNLOADED = 'downloaded'
+    STATE_SKIPPED = 'skipped'
+    STATE_DISABLED_AT_SOURCE = 'source-disabled'
     STATE_ERROR = 'error'
     STATES = (STATE_UNKNOWN, STATE_SCHEDULED, STATE_DOWNLOADING, STATE_DOWNLOADED,
-              STATE_ERROR)
+              STATE_SKIPPED, STATE_DISABLED_AT_SOURCE, STATE_ERROR)
     STATE_ICONS = {
         STATE_UNKNOWN: '<i class="far fa-question-circle" title="Unknown download state"></i>',
         STATE_SCHEDULED: '<i class="far fa-clock" title="Scheduled to download"></i>',
         STATE_DOWNLOADING: '<i class="fas fa-download" title="Downloading now"></i>',
         STATE_DOWNLOADED: '<i class="far fa-check-circle" title="Downloaded"></i>',
+        STATE_SKIPPED: '<i class="fas fa-exclamation-circle" title="Skipped"></i>',
+        STATE_DISABLED_AT_SOURCE: '<i class="fas fa-stop-circle" title="Media downloading disabled at source"></i>',
         STATE_ERROR: '<i class="fas fa-exclamation-triangle" title="Error downloading"></i>',
     }
 

@@ -838,8 +859,9 @@ class Media(models.Model):
             fmt.append(resolution)
             vcodec = vformat['vcodec'].lower()
             fmt.append(vcodec)
-        acodec = aformat['acodec'].lower()
-        fmt.append(acodec)
+        if aformat:
+            acodec = aformat['acodec'].lower()
+            fmt.append(acodec)
         if vformat:
             if vformat['is_60fps']:
                 fps = '60fps'

@@ -887,7 +909,7 @@ class Media(models.Model):
             'source': self.source.slugname,
             'source_full': self.source.name,
             'title': self.slugtitle,
-            'title_full': self.title,
+            'title_full': clean_filename(self.title),
             'key': self.key,
             'format': '-'.join(display_format['format']),
             'playlist_index': self.playlist_index,

@@ -902,10 +924,17 @@ class Media(models.Model):
             'hdr': display_format['hdr'],
         }
 
+    @property
+    def has_metadata(self):
+        return self.metadata is not None
+
     @property
     def loaded_metadata(self):
         try:
-            return json.loads(self.metadata)
+            data = json.loads(self.metadata)
+            if not isinstance(data, dict):
+                return {}
+            return data
         except Exception as e:
             return {}
 

@@ -942,7 +971,10 @@ class Media(models.Model):
     @property
     def upload_date(self):
         field = self.get_metadata_field('upload_date')
-        upload_date_str = self.loaded_metadata.get(field, '').strip()
+        try:
+            upload_date_str = self.loaded_metadata.get(field, '').strip()
+        except (AttributeError, ValueError) as e:
+            return None
         try:
             return datetime.strptime(upload_date_str, '%Y%m%d')
         except (AttributeError, ValueError) as e:

@@ -974,8 +1006,12 @@ class Media(models.Model):
     def votes(self):
         field = self.get_metadata_field('upvotes')
         upvotes = self.loaded_metadata.get(field, 0)
+        if not isinstance(upvotes, int):
+            upvotes = 0
         field = self.get_metadata_field('downvotes')
         downvotes = self.loaded_metadata.get(field, 0)
+        if not isinstance(downvotes, int):
+            downvotes = 0
         return upvotes + downvotes
 
     @property

@@ -1005,7 +1041,7 @@ class Media(models.Model):
 
     @property
     def filename(self):
-        # Otherwise, create a suitable filename from the source media_format
+        # Create a suitable filename from the source media_format
         media_format = str(self.source.media_format)
         media_details = self.format_dict
         return media_format.format(**media_details)

@@ -1161,6 +1197,10 @@ class Media(models.Model):
                 return self.STATE_ERROR
             else:
                 return self.STATE_SCHEDULED
+        if self.skip:
+            return self.STATE_SKIPPED
+        if not self.source.download_media:
+            return self.STATE_DISABLED_AT_SOURCE
         return self.STATE_UNKNOWN
 
     def get_download_state_icon(self, task=None):

@@ -1178,6 +1218,16 @@ class Media(models.Model):
         # Return the download paramaters
         return format_str, self.source.extension
 
+    def index_metadata(self):
+        '''
+            Index the media metadata returning a dict of info.
+        '''
+        indexer = self.INDEXERS.get(self.source.source_type, None)
+        if not callable(indexer):
+            raise Exception(f'Meida with source type f"{self.source.source_type}" '
+                            f'has no indexer')
+        return indexer(self.url)
+
 
 class MediaServer(models.Model):
     '''
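As a small usage sketch of the new index URL plumbing added above,
`Source.create_index_url()` simply formats the matching `INDEX_URLS` template; the
channel key below is a made-up example:

```python
from sync.models import Source

# Hypothetical channel key, purely for illustration
print(Source.create_index_url(Source.SOURCE_TYPE_YOUTUBE_CHANNEL, key='SomeChannel'))
# -> 'https://www.youtube.com/c/SomeChannel/videos'
```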
@@ -8,8 +8,9 @@ from background_task.models import Task
|
|||||||
from common.logger import log
|
from common.logger import log
|
||||||
from .models import Source, Media, MediaServer
|
from .models import Source, Media, MediaServer
|
||||||
from .tasks import (delete_task_by_source, delete_task_by_media, index_source_task,
|
from .tasks import (delete_task_by_source, delete_task_by_media, index_source_task,
|
||||||
download_media_thumbnail, map_task_to_instance,
|
download_media_thumbnail, download_media_metadata,
|
||||||
check_source_directory_exists, download_media, rescan_media_server)
|
map_task_to_instance, check_source_directory_exists,
|
||||||
|
download_media, rescan_media_server)
|
||||||
from .utils import delete_file
|
from .utils import delete_file
|
||||||
|
|
||||||
|
|
||||||
@@ -93,16 +94,27 @@ def task_task_failed(sender, task_id, completed_task, **kwargs):
|
|||||||
def media_post_save(sender, instance, created, **kwargs):
|
def media_post_save(sender, instance, created, **kwargs):
|
||||||
# Triggered after media is saved, Recalculate the "can_download" flag, this may
|
# Triggered after media is saved, Recalculate the "can_download" flag, this may
|
||||||
# need to change if the source specifications have been changed
|
# need to change if the source specifications have been changed
|
||||||
post_save.disconnect(media_post_save, sender=Media)
|
if instance.metadata:
|
||||||
if instance.get_format_str():
|
post_save.disconnect(media_post_save, sender=Media)
|
||||||
if not instance.can_download:
|
if instance.get_format_str():
|
||||||
instance.can_download = True
|
if not instance.can_download:
|
||||||
instance.save()
|
instance.can_download = True
|
||||||
else:
|
instance.save()
|
||||||
if instance.can_download:
|
else:
|
||||||
instance.can_download = False
|
if instance.can_download:
|
||||||
instance.save()
|
instance.can_download = False
|
||||||
post_save.connect(media_post_save, sender=Media)
|
instance.save()
|
||||||
|
post_save.connect(media_post_save, sender=Media)
|
||||||
|
# If the media is missing metadata schedule it to be downloaded
|
||||||
|
if not instance.metadata:
|
||||||
|
log.info(f'Scheduling task to download metadata for: {instance.url}')
|
||||||
|
verbose_name = _('Downloading metadata for "{}"')
|
||||||
|
download_media_metadata(
|
||||||
|
str(instance.pk),
|
||||||
|
priority=10,
|
||||||
|
verbose_name=verbose_name.format(instance.pk),
|
||||||
|
remove_existing_tasks=True
|
||||||
|
)
|
||||||
# If the media is missing a thumbnail schedule it to be downloaded
|
# If the media is missing a thumbnail schedule it to be downloaded
|
||||||
if not instance.thumb_file_exists:
|
if not instance.thumb_file_exists:
|
||||||
instance.thumb = None
|
instance.thumb = None
|
||||||
@@ -124,7 +136,8 @@ def media_post_save(sender, instance, created, **kwargs):
     if not instance.media_file_exists:
         instance.downloaded = False
         instance.media_file = None
-    if not instance.downloaded and instance.can_download and not instance.skip:
+    if (not instance.downloaded and instance.can_download and not instance.skip
+        and instance.source.download_media):
         delete_task_by_media('sync.tasks.download_media', (str(instance.pk),))
         verbose_name = _('Downloading media for "{}"')
         download_media(
@@ -10,7 +10,7 @@ import math
 import uuid
 from io import BytesIO
 from hashlib import sha1
-from datetime import timedelta
+from datetime import timedelta, datetime
 from shutil import copyfile
 from PIL import Image
 from django.conf import settings
@@ -179,30 +179,6 @@ def index_source_task(source_id):
         except Media.DoesNotExist:
             media = Media(key=key)
         media.source = source
-        media.metadata = json.dumps(video)
-        upload_date = media.upload_date
-        # Media must have a valid upload date
-        if upload_date:
-            media.published = timezone.make_aware(upload_date)
-        else:
-            log.error(f'Media has no upload date, skipping: {source} / {media}')
-            continue
-        # If the source has a download cap date check the upload date is allowed
-        max_cap_age = source.download_cap_date
-        if max_cap_age:
-            if media.published < max_cap_age:
-                # Media was published after the cap date, skip it
-                log.warn(f'Media: {source} / {media} is older than cap age '
-                         f'{max_cap_age}, skipping')
-                continue
-        # If the source has a cut-off check the upload date is within the allowed delta
-        if source.delete_old_media and source.days_to_keep > 0:
-            delta = timezone.now() - timedelta(days=source.days_to_keep)
-            if media.published < delta:
-                # Media was published after the cutoff date, skip it
-                log.warn(f'Media: {source} / {media} is older than '
-                         f'{source.days_to_keep} days, skipping')
-                continue
         try:
             media.save()
             log.info(f'Indexed media: {source} / {media}')
@@ -234,6 +210,61 @@ def check_source_directory_exists(source_id):
         source.make_directory()


+@background(schedule=0)
+def download_media_metadata(media_id):
+    '''
+        Downloads the metadata for a media item.
+    '''
+    try:
+        media = Media.objects.get(pk=media_id)
+    except Media.DoesNotExist:
+        # Task triggered but the media no longer exists, do nothing
+        log.error(f'Task download_media_metadata(pk={media_id}) called but no '
+                  f'media exists with ID: {media_id}')
+        return
+    source = media.source
+    metadata = media.index_metadata()
+    media.metadata = json.dumps(metadata)
+    upload_date = media.upload_date
+    # Media must have a valid upload date
+    if upload_date:
+        media.published = timezone.make_aware(upload_date)
+    else:
+        log.error(f'Media has no upload date, skipping: {source} / {media}')
+        media.skip = True
+    # If the source has a download cap date check the upload date is allowed
+    max_cap_age = source.download_cap_date
+    if max_cap_age:
+        if media.published < max_cap_age:
+            # Media was published after the cap date, skip it
+            log.warn(f'Media: {source} / {media} is older than cap age '
+                     f'{max_cap_age}, skipping')
+            media.skip = True
+    # If the source has a cut-off check the upload date is within the allowed delta
+    if source.delete_old_media and source.days_to_keep > 0:
+        if not isinstance(media.published, datetime):
+            # Media has no known published date or incomplete metadata
+            log.warn(f'Media: {source} / {media} has no published date, skipping')
+            media.skip = True
+        else:
+            delta = timezone.now() - timedelta(days=source.days_to_keep)
+            if media.published < delta:
+                # Media was published after the cutoff date, skip it
+                log.warn(f'Media: {source} / {media} is older than '
+                         f'{source.days_to_keep} days, skipping')
+                media.skip = True
+    # Check we can download the media item
+    if not media.skip:
+        if media.get_format_str():
+            media.can_download = True
+        else:
+            media.can_download = False
+    # Save the media
+    media.save()
+    log.info(f'Saved {len(media.metadata)} bytes of metadata for: '
+             f'{source} / {media_id}')
+
+
 @background(schedule=0)
 def download_media_thumbnail(media_id, url):
     '''
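download_media_metadata is a django-background-tasks task like the others in this module: calling the decorated function does not run it inline, it queues a Task row that the worker process executes later. A rough sketch of that calling convention, with a throwaway task name and arguments:

# Sketch of declaring and scheduling a django-background-tasks task, assuming
# the background_task app is installed; the task name and arguments are
# placeholders.
from background_task import background


@background(schedule=0)
def example_task(media_id):
    # This body runs later, inside the process_tasks worker, not at call time.
    print(f'processing {media_id}')


# Queues the task; priority, verbose_name and remove_existing_tasks are the
# same keyword arguments the media_post_save signal uses earlier in this diff.
example_task('some-uuid', priority=10,
             verbose_name='Downloading metadata for "some-uuid"',
             remove_existing_tasks=True)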
@@ -282,6 +313,17 @@ def download_media(media_id):
         log.warn(f'Download task triggered media: {media} (UUID: {media.pk}) but it '
                  f'is now marked to be skipped, not downloading')
         return
+    if media.downloaded and media.media_file:
+        # Media has been marked as downloaded before the download_media task was fired,
+        # skip it
+        log.warn(f'Download task triggered media: {media} (UUID: {media.pk}) but it '
+                 f'has already been marked as downloaded, not downloading again')
+        return
+    if not media.source.download_media:
+        log.warn(f'Download task triggered media: {media} (UUID: {media.pk}) but the '
+                 f'source {media.source} has since been marked to not download media, '
+                 f'not downloading')
+        return
     filepath = media.filepath
     log.info(f'Downloading media: {media} (UUID: {media.pk}) to: "{filepath}"')
     format_str, container = media.download_media()
@@ -64,8 +64,14 @@
 <td class="hide-on-small-only">Fallback</td>
 <td><span class="hide-on-med-and-up">Fallback<br></span><strong>{{ media.source.get_fallback_display }}</strong></td>
 </tr>
+{% if not media.source.download_media %}
+<tr title="Is media marked to be downloaded at the source?">
+<td class="hide-on-small-only">Source download?</td>
+<td><span class="hide-on-med-and-up">Source download?<br></span><strong>{% if media.source.download_media %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
+</tr>
+{% endif %}
 {% if media.skip %}
-<tr title="Has the media been downloaded?">
+<tr title="Is the media marked to be skipped?">
 <td class="hide-on-small-only">Skipping?</td>
 <td><span class="hide-on-med-and-up">Skipping?<br></span><strong>{% if media.skip %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
 </tr>
@@ -109,7 +115,7 @@
 {% else %}
 <tr title="Can the media be downloaded?">
 <td class="hide-on-small-only">Can download?</td>
-<td><span class="hide-on-med-and-up">Can download?<br></span><strong>{% if youtube_dl_format %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
+<td><span class="hide-on-med-and-up">Can download?<br></span><strong>{% if media.can_download %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
 </tr>
 {% endif %}
 <tr title="The available media formats">
@@ -4,9 +4,16 @@

 {% block content %}
 <div class="row">
-  <div class="col s12">
+  <div class="col s12 m9">
     <h1 class="truncate">Media</h1>
   </div>
+  <div class="col s12 m3">
+    {% if show_skipped %}
+    <a href="{% url 'sync:media' %}{% if source %}?filter={{ source.pk }}{% endif %}" class="btn"><i class="far fa-eye-slash"></i> Hide skipped media</a>
+    {% else %}
+    <a href="{% url 'sync:media' %}?show_skipped=yes{% if source %}&filter={{ source.pk }}{% endif %}" class="btn"><i class="far fa-eye"></i> Show skipped media</a>
+    {% endif %}
+  </div>
 </div>
 {% include 'infobox.html' with message=message %}
 <div class="row no-margin-bottom">
@@ -24,8 +31,12 @@
 {% else %}
 {% if m.skip %}
 <span class="error-text"><i class="fas fa-times" title="Skipping media"></i> Skipped</span>
+{% elif not m.source.download_media %}
+<span class="error-text"><i class="fas fa-times" title="Not downloading media for this source"></i> Disabled at source</span>
+{% elif not m.has_metadata %}
+<i class="far fa-clock" title="Waiting for metadata"></i> Fetching metadata
 {% elif m.can_download %}
-<i class="far fa-clock" title="Waiting to download or downloading"></i> {{ m.published|date:'Y-m-d' }}
+<i class="far fa-clock" title="Waiting to download or downloading"></i> Downloading
 {% else %}
 <span class="error-text"><i class="fas fa-exclamation-triangle" title="No matching formats to download"></i> No matching formats</span>
 {% endif %}
@@ -44,5 +55,5 @@
   </div>
 {% endfor %}
 </div>
-{% include 'pagination.html' with pagination=sources.paginator filter=source.pk %}
+{% include 'pagination.html' with pagination=sources.paginator filter=source.pk show_skipped=show_skipped %}
 {% endblock %}
@@ -61,6 +61,10 @@
 <td class="hide-on-small-only">Index schedule</td>
 <td><span class="hide-on-med-and-up">Index schedule<br></span><strong>{{ source.get_index_schedule_display }}</strong></td>
 </tr>
+<tr title="Download media from this source">
+<td class="hide-on-small-only">Download media?</td>
+<td><span class="hide-on-med-and-up">Download media?<br></span><strong>{% if source.download_media %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
+</tr>
 <tr title="When the source was created locally in TubeSync">
 <td class="hide-on-small-only">Created</td>
 <td><span class="hide-on-med-and-up">Created<br></span><strong>{{ source.created|date:'Y-m-d H:i:s' }}</strong></td>
@@ -30,7 +30,7 @@
 {% if source.has_failed %}
 <span class="error-text"><i class="fas fa-exclamation-triangle"></i> <strong>Source has permanent failures</strong></span>
 {% else %}
-<strong>{{ source.media_count }}</strong> media items{% if source.delete_old_media and source.days_to_keep > 0 %}, keep {{ source.days_to_keep }} days of media{% endif %}
+<strong>{{ source.media_count }}</strong> media items, <strong>{{ source.downloaded_count }}</strong> downloaded{% if source.delete_old_media and source.days_to_keep > 0 %}, keeping {{ source.days_to_keep }} days of media{% endif %}
 {% endif %}
 </a>
 {% empty %}
@@ -10,7 +10,7 @@ from django.views.generic.detail import SingleObjectMixin
 from django.http import HttpResponse
 from django.urls import reverse_lazy
 from django.db import IntegrityError
-from django.db.models import Q, Count, Sum
+from django.db.models import Q, Count, Sum, When, Case
 from django.forms import ValidationError
 from django.utils.text import slugify
 from django.utils import timezone
@@ -104,7 +104,10 @@ class SourcesView(ListView):

     def get_queryset(self):
         all_sources = Source.objects.all().order_by('name')
-        return all_sources.annotate(media_count=Count('media_source'))
+        return all_sources.annotate(
+            media_count=Count('media_source'),
+            downloaded_count=Count(Case(When(media_source__downloaded=True, then=1)))
+        )

     def get_context_data(self, *args, **kwargs):
         data = super().get_context_data(*args, **kwargs)
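As an aside, the Count(Case(When(...))) construction above is equivalent to Django 2.0+'s filtered aggregates, so the same annotation could also be written as below. This is only an alternative sketch against the same models, not part of the commit; Source is the project model already imported by this views module.

# Equivalent annotation using a filtered aggregate (Django 2.0+).
from django.db.models import Count, Q

def get_queryset(self):
    all_sources = Source.objects.all().order_by('name')
    return all_sources.annotate(
        media_count=Count('media_source'),
        downloaded_count=Count('media_source',
                               filter=Q(media_source__downloaded=True)),
    )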
@@ -274,9 +277,9 @@ class AddSourceView(CreateView):
     template_name = 'sync/source-add.html'
     model = Source
     fields = ('source_type', 'key', 'name', 'directory', 'media_format',
-              'index_schedule', 'download_cap', 'delete_old_media', 'days_to_keep',
-              'source_resolution', 'source_vcodec', 'source_acodec', 'prefer_60fps',
-              'prefer_hdr', 'fallback', 'copy_thumbnails', 'write_nfo')
+              'index_schedule', 'download_media', 'download_cap', 'delete_old_media',
+              'days_to_keep', 'source_resolution', 'source_vcodec', 'source_acodec',
+              'prefer_60fps', 'prefer_hdr', 'fallback', 'copy_thumbnails', 'write_nfo')
     errors = {
         'invalid_media_format': _('Invalid media format, the media format contains '
                                   'errors or is empty. Check the table at the end of '
@@ -365,9 +368,9 @@ class UpdateSourceView(UpdateView):
     template_name = 'sync/source-update.html'
     model = Source
     fields = ('source_type', 'key', 'name', 'directory', 'media_format',
-              'index_schedule', 'download_cap', 'delete_old_media', 'days_to_keep',
-              'source_resolution', 'source_vcodec', 'source_acodec', 'prefer_60fps',
-              'prefer_hdr', 'fallback', 'copy_thumbnails', 'write_nfo')
+              'index_schedule', 'download_media', 'download_cap', 'delete_old_media',
+              'days_to_keep', 'source_resolution', 'source_vcodec', 'source_acodec',
+              'prefer_60fps', 'prefer_hdr', 'fallback', 'copy_thumbnails', 'write_nfo')
     errors = {
         'invalid_media_format': _('Invalid media format, the media format contains '
                                   'errors or is empty. Check the table at the end of '
@@ -438,6 +441,7 @@ class MediaView(ListView):

     def __init__(self, *args, **kwargs):
         self.filter_source = None
+        self.show_skipped = False
         super().__init__(*args, **kwargs)

     def dispatch(self, request, *args, **kwargs):
@@ -447,13 +451,22 @@ class MediaView(ListView):
                 self.filter_source = Source.objects.get(pk=filter_by)
             except Source.DoesNotExist:
                 self.filter_source = None
+        show_skipped = request.GET.get('show_skipped', '').strip()
+        if show_skipped == 'yes':
+            self.show_skipped = True
         return super().dispatch(request, *args, **kwargs)

     def get_queryset(self):
         if self.filter_source:
-            q = Media.objects.filter(source=self.filter_source)
+            if self.show_skipped:
+                q = Media.objects.filter(source=self.filter_source)
+            else:
+                q = Media.objects.filter(source=self.filter_source, skip=False)
         else:
-            q = Media.objects.all()
+            if self.show_skipped:
+                q = Media.objects.all()
+            else:
+                q = Media.objects.filter(skip=False)
         return q.order_by('-published', '-created')

     def get_context_data(self, *args, **kwargs):
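The four-way branch in get_queryset() above could also be expressed by building the filter keyword arguments once. The behaviour should be identical; this is shown only as a compact alternative sketch, assuming the same Media model and the filter_source / show_skipped attributes set up on the view.

# Alternative sketch of the same queryset logic, collapsing the branches.
def get_queryset(self):
    filters = {}
    if self.filter_source:
        filters['source'] = self.filter_source
    if not self.show_skipped:
        filters['skip'] = False
    return Media.objects.filter(**filters).order_by('-published', '-created')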
@@ -464,6 +477,7 @@ class MediaView(ListView):
             message = str(self.messages.get('filter', ''))
             data['message'] = message.format(name=self.filter_source.name)
             data['source'] = self.filter_source
+        data['show_skipped'] = self.show_skipped
         return data

@@ -37,7 +37,8 @@ def get_media_info(url):
         'skip_download': True,
         'forcejson': True,
         'simulate': True,
-        'logger': log
+        'logger': log,
+        'extract_flat': True,
     })
     response = {}
     with youtube_dl.YoutubeDL(opts) as y:
@@ -45,6 +46,11 @@ def get_media_info(url):
             response = y.extract_info(url, download=False)
         except youtube_dl.utils.DownloadError as e:
             raise YouTubeError(f'Failed to extract_info for "{url}": {e}') from e
+        if not response:
+            raise YouTubeError(f'Failed to extract_info for "{url}": No metadata was '
+                               f'returned by youtube-dl, check for error messages in the '
+                               f'logs above. This task will be retried later with an '
+                               f'exponential backoff.')
     return response

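The new 'extract_flat': True option asks youtube-dl to return lightweight playlist entries (IDs, titles, URLs) without fully resolving every video, which keeps channel and playlist indexing fast; the added guard then treats an empty info dict as an error so the task gets retried. A minimal standalone sketch of those options, with the logger omitted and a placeholder playlist URL:

# Standalone sketch of the youtube-dl options used above; the URL is a placeholder.
import youtube_dl

opts = {
    'skip_download': True,
    'forcejson': True,
    'simulate': True,
    'extract_flat': True,   # flat entries: id/title/url, no per-video extraction
}

with youtube_dl.YoutubeDL(opts) as y:
    info = y.extract_info('https://www.youtube.com/playlist?list=EXAMPLE',
                          download=False)
    if not info:
        print('youtube-dl returned no metadata')
    else:
        print(len(info.get('entries', [])), 'entries found')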
@@ -28,7 +28,7 @@ DATABASES = {
     }
 }

-DEFAULT_THREADS = 2
+DEFAULT_THREADS = 1
 MAX_BACKGROUND_TASK_ASYNC_THREADS = 8
 BACKGROUND_TASK_ASYNC_THREADS = int(os.getenv('TUBESYNC_WORKERS', DEFAULT_THREADS))
 if BACKGROUND_TASK_ASYNC_THREADS > MAX_BACKGROUND_TASK_ASYNC_THREADS:
@@ -38,3 +38,15 @@ if BACKGROUND_TASK_ASYNC_THREADS > MAX_BACKGROUND_TASK_ASYNC_THREADS:
 MEDIA_ROOT = CONFIG_BASE_DIR / 'media'
 DOWNLOAD_ROOT = DOWNLOADS_BASE_DIR
 YOUTUBE_DL_CACHEDIR = CONFIG_BASE_DIR / 'cache'
+
+
+BASICAUTH_USERNAME = os.getenv('HTTP_USER', '').strip()
+BASICAUTH_PASSWORD = os.getenv('HTTP_PASS', '').strip()
+if BASICAUTH_USERNAME and BASICAUTH_PASSWORD:
+    BASICAUTH_DISABLE = False
+    BASICAUTH_USERS = {
+        BASICAUTH_USERNAME: BASICAUTH_PASSWORD,
+    }
+else:
+    BASICAUTH_DISABLE = True
+    BASICAUTH_USERS = {}
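With django-basicauth wired in, setting both the HTTP_USER and HTTP_PASS environment variables enables authentication, and leaving either unset disables it; per the settings hunk below, the /healthcheck endpoint stays open regardless. A quick way to confirm the behaviour against a running instance (URL, port and credentials here are placeholders, adjust to your deployment):

# Placeholder URL and credentials: point these at your TubeSync instance and
# the HTTP_USER / HTTP_PASS values you exported before starting it.
import requests

base = 'http://localhost:8000'

unauthenticated = requests.get(base + '/')
print(unauthenticated.status_code)   # expect 401 when basic auth is enabled

authenticated = requests.get(base + '/', auth=('myuser', 'mypassword'))
print(authenticated.status_code)     # expect 200 with the matching credentials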
@@ -6,7 +6,7 @@ CONFIG_BASE_DIR = BASE_DIR
 DOWNLOADS_BASE_DIR = BASE_DIR


-VERSION = 0.7
+VERSION = '0.9.1'
 SECRET_KEY = ''
 DEBUG = False
 ALLOWED_HOSTS = []
@@ -37,6 +37,7 @@ MIDDLEWARE = [
     'django.middleware.clickjacking.XFrameOptionsMiddleware',
     'whitenoise.middleware.WhiteNoiseMiddleware',
     'common.middleware.MaterializeDefaultFieldsMiddleware',
+    'common.middleware.BasicAuthMiddleware',
 ]

@@ -114,11 +115,20 @@ Disallow: /
 '''.strip()


+X_FRAME_OPTIONS = 'SAMEORIGIN'
+
+
+BASICAUTH_DISABLE = True
+BASICAUTH_REALM = 'Authenticate to TubeSync'
+BASICAUTH_ALWAYS_ALLOW_URIS = ('/healthcheck',)
+BASICAUTH_USERS = {}
+
+
 HEALTHCHECK_FIREWALL = True
 HEALTHCHECK_ALLOWED_IPS = ('127.0.0.1',)


-MAX_ATTEMPTS = 10                   # Number of times tasks will be retried
+MAX_ATTEMPTS = 15                   # Number of times tasks will be retried
 MAX_RUN_TIME = 1800                 # Maximum amount of time in seconds a task can run
 BACKGROUND_TASK_RUN_ASYNC = True    # Run tasks async in the background
 BACKGROUND_TASK_ASYNC_THREADS = 1   # Number of async tasks to run at once
@@ -128,7 +138,7 @@ COMPLETED_TASKS_DAYS_TO_KEEP = 7 # Number of days to keep completed t


 SOURCES_PER_PAGE = 100
-MEDIA_PER_PAGE = 72
+MEDIA_PER_PAGE = 144
 TASKS_PER_PAGE = 100

@@ -149,7 +159,7 @@ YOUTUBE_DEFAULTS = {
 }


-MEDIA_FORMATSTR_DEFAULT = '{yyyymmdd}_{source}_{title}_{key}_{format}.{ext}'
+MEDIA_FORMATSTR_DEFAULT = '{yyyy_mm_dd}_{source}_{title}_{key}_{format}.{ext}'


 try:
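The default media filename template swaps the compact {yyyymmdd} date token for a dash-separated {yyyy_mm_dd} one. Purely as an illustration of the resulting names (the substituted values below are made up, and the exact tokens TubeSync supports are defined elsewhere in the project):

# Example values only; real substitutions come from each media item.
MEDIA_FORMATSTR_DEFAULT = '{yyyy_mm_dd}_{source}_{title}_{key}_{format}.{ext}'

print(MEDIA_FORMATSTR_DEFAULT.format(
    yyyy_mm_dd='2021-01-31',
    source='example-channel',
    title='an-example-video',
    key='dQw4w9WgXcQ',
    format='1080p-vp9-opus',
    ext='mkv',
))
# -> 2021-01-31_example-channel_an-example-video_dQw4w9WgXcQ_1080p-vp9-opus.mkv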