Compare commits
383 Commits
Author | SHA1 | Date |
---|---|---|
|
c6acd5378c | |
|
e7788eb8fb | |
|
e4e0b48c0b | |
|
3573c1187f | |
|
b11b667aff | |
|
1b581aa4ba | |
|
7384c00713 | |
|
4fdd172b05 | |
|
9c18115032 | |
|
6853c1fa76 | |
|
ed07073cf4 | |
|
af94b37ee6 | |
|
ad1d49a835 | |
|
46ba2593a2 | |
|
46a43b968a | |
|
805a0eefbd | |
|
3a87b5779e | |
|
f86e72aa92 | |
|
f550e32b5e | |
|
034d877d6a | |
|
b9b702ab85 | |
|
c159c24d15 | |
|
6c9772d573 | |
|
45b8b3f65b | |
|
7aa9c0ec8a | |
|
e54a762a7b | |
|
512b70adad | |
|
6c21ff15ab | |
|
adf26cb4e3 | |
|
45c12561ba | |
|
2d6f485a5d | |
|
33b471175a | |
|
7f4e8586b7 | |
|
bab4b9b056 | |
|
30c2127271 | |
|
d1cb7ef76c | |
|
1fd4f87c53 | |
|
cf06f4cbc2 | |
|
0523f481d2 | |
|
aa4bd4ec26 | |
|
96d9ee93ef | |
|
43cf532903 | |
|
8240c49d5c | |
|
0c5e3d3818 | |
|
22edd1bbda | |
|
fea0bb191e | |
|
0f65a4027a | |
|
5cac374486 | |
|
69efc9298d | |
|
1be8dff769 | |
|
350e544594 | |
|
0542c734e5 | |
|
42b337c408 | |
|
2f82f8c599 | |
|
b57ca110b0 | |
|
e3e7352600 | |
|
6d3a7bf859 | |
|
25f622311f | |
|
adea4a0ecd | |
|
0d76f2f94e | |
|
71578d926e | |
|
777cdb5ecc | |
|
3dd445bf96 | |
|
86744c0510 | |
|
be7454f72a | |
|
e9f03cb6bf | |
|
ddc127e6af | |
|
63d32a1e11 | |
|
2ebbb8480e | |
|
21785e031a | |
|
f12e13162f | |
|
5c9c1550bf | |
|
12638afb60 | |
|
b9886a3b27 | |
|
612f78e7eb | |
|
0c5a9c53f8 | |
|
d439b2f223 | |
|
7116617cd2 | |
|
422d228359 | |
|
1f68be5c26 | |
|
089a487f3a | |
|
24ae70ea70 | |
|
72c3242e70 | |
|
f3e93c0ecf | |
|
fa8efb178e | |
|
2001faea44 | |
|
b370e98031 | |
|
55bfd911b9 | |
|
e47d0eb7be | |
|
a95c64bc10 | |
|
e9d4f89f39 | |
|
7876b48860 | |
|
2639d911ab | |
|
e4c0f0e98a | |
|
601449ce08 | |
|
fe4c876fdc | |
|
fbe9546a74 | |
|
ce14167cee | |
|
c927f32aa6 | |
|
1d5579aa31 | |
|
d8a9572411 | |
|
8315efac03 | |
|
35678e3be9 | |
|
e75b446883 | |
|
dd05595558 | |
|
2772e85d9f | |
|
931aa78815 | |
|
24a49d2f14 | |
|
f14d2dd29e | |
|
f4e5b6e76c | |
|
977f996d8e | |
|
dc5491455c | |
|
70ef11d552 | |
|
b04e237cb8 | |
|
55c58b4836 | |
|
e871983707 | |
|
b3f93ddef7 | |
|
bf7a0fcec0 | |
|
598ee2bd0a | |
|
7b12fe3fad | |
|
7358b52184 | |
|
4b4b4eb58d | |
|
b719fd5122 | |
|
4696aebebc | |
|
7d333487fe | |
|
844d17006e | |
|
f9a27eb33e | |
|
b8434ff444 | |
|
932eb4caf4 | |
|
812fbc5f46 | |
|
fdc591cc7c | |
|
4ae454a4f3 | |
|
4f6af702ae | |
|
2431f8775a | |
|
438316953a | |
|
85637fecba | |
|
f9dfffe91a | |
|
0845a6662d | |
|
419c4c5a9f | |
|
2f475bf2a8 | |
|
7d16a1714c | |
|
a7100a0f53 | |
|
5a4e6cee58 | |
|
e69adafcec | |
|
f9908a4d3b | |
|
bf99241ad2 | |
|
0e278bc8c4 | |
|
57921ca6b9 | |
|
fb23fdeae1 | |
|
433a7792d5 | |
|
e198cc011b | |
|
296a790af5 | |
|
e190821b7b | |
|
1ba865cf0d | |
|
05d50c958e | |
|
8426c7309a | |
|
0450d47d81 | |
|
e8d899d273 | |
|
25d5768f6e | |
|
e9a3f2dd59 | |
|
7832282545 | |
|
d161aef112 | |
|
8901aea8d7 | |
|
227cae4cdb | |
|
5e57abe86a | |
|
c04c1b3cfb | |
|
a94541a354 | |
|
84a368aa09 | |
|
6d2fb86e7d | |
|
67a3998aac | |
|
e3ca39b5db | |
|
872bfc5124 | |
|
ae5550a28d | |
|
153ca032b1 | |
|
95e727b0a8 | |
|
f1c6fc3086 | |
|
a3559526cb | |
|
a0ca2b3061 | |
|
120a19d2ba | |
|
4735e72f12 | |
|
5954dba48d | |
|
3f699c82ec | |
|
cb39ece21b | |
|
3943115b18 | |
|
97183fff97 | |
|
b4a247bf37 | |
|
3bee755eb5 | |
|
9957639be5 | |
|
a5a8e37a20 | |
|
7a1b2adc59 | |
|
7668466bc3 | |
|
ceb8cbc442 | |
|
8b0d1b3397 | |
|
77fb4963f9 | |
|
538b3cb319 | |
|
2335ceb2dc | |
|
0c347d523d | |
|
d0a214e21b | |
|
2d8e6ed9b8 | |
|
d0fcc07656 | |
|
5bf53b3d3a | |
|
280112beae | |
|
367d41f2be | |
|
61cd63bcc1 | |
|
62e2e2f9e6 | |
|
aa90a1afb0 | |
|
238c0b5911 | |
|
4d7e9133e0 | |
|
709b7b44d5 | |
|
425b011054 | |
|
b1b3c99726 | |
|
02212b8fad | |
|
70e541dea0 | |
|
cc7b7727c2 | |
|
0757c99f01 | |
|
61d97201a5 | |
|
a58aef29fb | |
|
56c882fa79 | |
|
9a3030543f | |
|
4eca23d88b | |
|
aa6df98927 | |
|
f3cac1908c | |
|
d9a519ffde | |
|
185823b040 | |
|
4774a35d44 | |
|
b4a89968d0 | |
|
5056419aa4 | |
|
a8488026d0 | |
|
6459e273f1 | |
|
42e4ee775f | |
|
b3d9e74818 | |
|
c396821cb1 | |
|
f9858a4d1a | |
|
3c1d64a089 | |
|
00fbd53b11 | |
|
99825c9a08 | |
|
4f163f2f2c | |
|
936800992c | |
|
2e9ee04c97 | |
|
8d60629034 | |
|
f54adab213 | |
|
6618409f9c | |
|
8d08027024 | |
|
9a543b1496 | |
|
b70703b7a7 | |
|
6ac0c6e9de | |
|
ecb1aaf5b5 | |
|
4c5027e0c4 | |
|
e8d75a79c5 | |
|
ff4be7cfa0 | |
|
c1cb19259e | |
|
837b6c3107 | |
|
ced6314a62 | |
|
bb6c195ae7 | |
|
c280b76777 | |
|
248da767b0 | |
|
1069b87295 | |
|
3525a65cd6 | |
|
c51a5bb365 | |
|
7f4b9aff14 | |
|
a59e7fe65f | |
|
3e0a71f2ef | |
|
3dfbca2af4 | |
|
0c256f59d8 | |
|
dbbae72c25 | |
|
b1b852d82c | |
|
437bb17f75 | |
|
fdfcb5fd33 | |
|
ff35f791f6 | |
|
b2ea37ffec | |
|
d89530d5b8 | |
|
f00050008b | |
|
68604d19c7 | |
|
55e5b5632f | |
|
5e18cb92dd | |
|
6178e0baa0 | |
|
8050bac507 | |
|
6dcdac1647 | |
|
763f6b89ef | |
|
6c28292918 | |
|
574fc55a5e | |
|
c8fd74b3a4 | |
|
6622e17a5a | |
|
ea05bd0b13 | |
|
019c98dc76 | |
|
72dfe51a46 | |
|
22cebba8ac | |
|
d51d198f94 | |
|
ed0c2d7dd3 | |
|
5ced901ae8 | |
|
afda481046 | |
|
a986864f77 | |
|
ad1c4ecbc9 | |
|
54b7de4442 | |
|
d1996aee80 | |
|
326cefbec1 | |
|
d6e81c6af7 | |
|
a000f8f2c0 | |
|
cbab09e931 | |
|
414fca08ca | |
|
874c71b7aa | |
|
5b101825f5 | |
|
0db8db4351 | |
|
d4fd148089 | |
|
c739d594d8 | |
|
05e8ad8e89 | |
|
024ab72e5f | |
|
66ec3a29ec | |
|
28a565737f | |
|
2c7116f6ba | |
|
9ccb9db6de | |
|
2d992cbb90 | |
|
302a3614cf | |
|
ea546013de | |
|
fb18610893 | |
|
2364432088 | |
|
655bed14fd | |
|
721399f665 | |
|
694ed5c581 | |
|
a98f2462ed | |
|
5461a5357d | |
|
20df9f4044 | |
|
3ec4f7c525 | |
|
443fb827d0 | |
|
a810303f52 | |
|
9370a481f9 | |
|
1478c95d59 | |
|
f69fa747af | |
|
a29a92893f | |
|
7d471056c1 | |
|
119493c181 | |
|
02a0f924b4 | |
|
38665eb00d | |
|
c32358bcef | |
|
df9316bede | |
|
8525d920a0 | |
|
a6e08d9a10 | |
|
2e0d0385b0 | |
|
972c184c70 | |
|
adeafbfcb4 | |
|
2c1c45e829 | |
|
c64f54bcb4 | |
|
6ce55b0337 | |
|
d06c4beae0 | |
|
db651e16b9 | |
|
86068790ed | |
|
ea72671351 | |
|
96b9eddf43 | |
|
bceefc8b01 | |
|
820cc69937 | |
|
1e8711be51 | |
|
e3423bc2d2 | |
|
6fbf72d0e7 | |
|
d6852bf828 | |
|
f6f4f244d7 | |
|
df35aa2a5f | |
|
799c0fce39 | |
|
2f324f28a9 | |
|
895bfe6f87 | |
|
e0669b107d | |
|
0dc201b293 | |
|
82fa0f6bce | |
|
8b93cb4a59 | |
|
647254d7f7 | |
|
3567e20600 | |
|
5348e25303 | |
|
749df3f7bb | |
|
2c2f53e5b2 | |
|
06cfafb803 | |
|
f5a37f2e86 | |
|
36747a47e0 | |
|
ffd69e8d40 | |
|
eebef3371f | |
|
4cd6701c8a | |
|
4ebe6f2a37 | |
|
d553d58fde | |
|
df40a1367a | |
|
607ee77e70 | |
|
9af493aa8a | |
|
f0c94ff789 | |
|
39c7799831 | |
|
da7371f830 | |
|
387cfefc8f |
|
@ -0,0 +1 @@
|
||||||
|
github: [meeb]
|
|
@ -4,12 +4,10 @@ env:
|
||||||
IMAGE_NAME: tubesync
|
IMAGE_NAME: tubesync
|
||||||
|
|
||||||
on:
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
pull_request:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test:
|
test:
|
||||||
|
@ -27,7 +25,7 @@ jobs:
|
||||||
run: |
|
run: |
|
||||||
python -m pip install --upgrade pip
|
python -m pip install --upgrade pip
|
||||||
pip install pipenv
|
pip install pipenv
|
||||||
pipenv install --system
|
pipenv install --system --skip-lock
|
||||||
- name: Set up Django environment
|
- name: Set up Django environment
|
||||||
run: cp tubesync/tubesync/local_settings.py.example tubesync/tubesync/local_settings.py
|
run: cp tubesync/tubesync/local_settings.py.example tubesync/tubesync/local_settings.py
|
||||||
- name: Run Django tests
|
- name: Run Django tests
|
||||||
|
@ -35,13 +33,24 @@ jobs:
|
||||||
containerise:
|
containerise:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- name: Set up QEMU
|
||||||
- name: Build the container image
|
uses: docker/setup-qemu-action@v1
|
||||||
run: docker build . --tag $IMAGE_NAME
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v1
|
||||||
- name: Log into GitHub Container Registry
|
- name: Log into GitHub Container Registry
|
||||||
run: echo "${{ secrets.REGISTRY_ACCESS_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin
|
run: echo "${{ secrets.REGISTRY_ACCESS_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin
|
||||||
- name: Push image to GitHub Container Registry
|
- name: Lowercase github username for ghcr
|
||||||
run: |
|
id: string
|
||||||
LATEST_TAG=ghcr.io/meeb/$IMAGE_NAME:latest
|
uses: ASzc/change-string-case-action@v1
|
||||||
docker tag $IMAGE_NAME $LATEST_TAG
|
with:
|
||||||
docker push $LATEST_TAG
|
string: ${{ github.actor }}
|
||||||
|
- name: Build and push
|
||||||
|
uses: docker/build-push-action@v2
|
||||||
|
with:
|
||||||
|
platforms: linux/amd64,linux/arm64
|
||||||
|
push: true
|
||||||
|
tags: ghcr.io/${{ steps.string.outputs.lowercase }}/${{ env.IMAGE_NAME }}:latest
|
||||||
|
cache-from: type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/${{ env.IMAGE_NAME }}:latest
|
||||||
|
cache-to: type=inline
|
||||||
|
build-args: |
|
||||||
|
IMAGE_NAME=${{ env.IMAGE_NAME }}
|
||||||
|
|
|
@ -11,18 +11,28 @@ jobs:
|
||||||
containerise:
|
containerise:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- name: Set up QEMU
|
||||||
|
uses: docker/setup-qemu-action@v1
|
||||||
- name: Get tag
|
- name: Get tag
|
||||||
id: vars
|
id: tag
|
||||||
run: echo ::set-output name=tag::${GITHUB_REF#refs/*/}
|
uses: dawidd6/action-get-tag@v1
|
||||||
- name: Build the container image
|
- uses: docker/build-push-action@v2
|
||||||
run: docker build . --tag $IMAGE_NAME
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v1
|
||||||
- name: Log into GitHub Container Registry
|
- name: Log into GitHub Container Registry
|
||||||
run: echo "${{ secrets.REGISTRY_ACCESS_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin
|
run: echo "${{ secrets.REGISTRY_ACCESS_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin
|
||||||
- name: Push image to GitHub Container Registry
|
- name: Lowercase github username for ghcr
|
||||||
env:
|
id: string
|
||||||
RELEASE_TAG: ${{ steps.vars.outputs.tag }}
|
uses: ASzc/change-string-case-action@v1
|
||||||
run: |
|
with:
|
||||||
REF_TAG=ghcr.io/meeb/$IMAGE_NAME:$RELEASE_TAG
|
string: ${{ github.actor }}
|
||||||
docker tag $IMAGE_NAME $REF_TAG
|
- name: Build and push
|
||||||
docker push $REF_TAG
|
uses: docker/build-push-action@v2
|
||||||
|
with:
|
||||||
|
platforms: linux/amd64,linux/arm64
|
||||||
|
push: true
|
||||||
|
tags: ghcr.io/${{ steps.string.outputs.lowercase }}/${{ env.IMAGE_NAME }}:${{ steps.tag.outputs.tag }}
|
||||||
|
cache-from: type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/${{ env.IMAGE_NAME }}:${{ steps.tag.outputs.tag }}
|
||||||
|
cache-to: type=inline
|
||||||
|
build-args: |
|
||||||
|
IMAGE_NAME=${{ env.IMAGE_NAME }}
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
.DS_Store
|
||||||
# Byte-compiled / optimized / DLL files
|
# Byte-compiled / optimized / DLL files
|
||||||
__pycache__/
|
__pycache__/
|
||||||
*.py[cod]
|
*.py[cod]
|
||||||
|
@ -130,3 +131,6 @@ dmypy.json
|
||||||
|
|
||||||
# Pyre type checker
|
# Pyre type checker
|
||||||
.pyre/
|
.pyre/
|
||||||
|
|
||||||
|
Pipfile.lock
|
||||||
|
.vscode/launch.json
|
||||||
|
|
124
Dockerfile
124
Dockerfile
|
@ -1,8 +1,9 @@
|
||||||
FROM debian:buster-slim
|
FROM debian:bookworm-slim
|
||||||
|
|
||||||
ARG ARCH="amd64"
|
ARG TARGETPLATFORM
|
||||||
ARG S6_VERSION="2.1.0.2"
|
ARG S6_VERSION="3.1.5.0"
|
||||||
ARG FFMPEG_VERSION="4.3.1"
|
ARG FFMPEG_DATE="autobuild-2023-11-29-14-19"
|
||||||
|
ARG FFMPEG_VERSION="112875-g47e214245b"
|
||||||
|
|
||||||
ENV DEBIAN_FRONTEND="noninteractive" \
|
ENV DEBIAN_FRONTEND="noninteractive" \
|
||||||
HOME="/root" \
|
HOME="/root" \
|
||||||
|
@ -10,63 +11,102 @@ ENV DEBIAN_FRONTEND="noninteractive" \
|
||||||
LANG="en_US.UTF-8" \
|
LANG="en_US.UTF-8" \
|
||||||
LC_ALL="en_US.UTF-8" \
|
LC_ALL="en_US.UTF-8" \
|
||||||
TERM="xterm" \
|
TERM="xterm" \
|
||||||
S6_EXPECTED_SHA256="52460473413601ff7a84ae690b161a074217ddc734990c2cdee9847166cf669e" \
|
S6_CMD_WAIT_FOR_SERVICES_MAXTIME="0"
|
||||||
S6_DOWNLOAD="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-${ARCH}.tar.gz" \
|
|
||||||
FFMPEG_EXPECTED_SHA256="47d95c0129fba27d051748a442a44a73ce1bd38d1e3f9fe1e9dd7258c7581fa5" \
|
|
||||||
FFMPEG_DOWNLOAD="https://tubesync.sfo2.digitaloceanspaces.com/ffmpeg-${FFMPEG_VERSION}-${ARCH}-static.tar.xz"
|
|
||||||
|
|
||||||
|
|
||||||
# Install third party software
|
# Install third party software
|
||||||
RUN set -x && \
|
RUN export ARCH=$(case ${TARGETPLATFORM:-linux/amd64} in \
|
||||||
|
"linux/amd64") echo "amd64" ;; \
|
||||||
|
"linux/arm64") echo "aarch64" ;; \
|
||||||
|
*) echo "" ;; esac) && \
|
||||||
|
export S6_ARCH_EXPECTED_SHA256=$(case ${TARGETPLATFORM:-linux/amd64} in \
|
||||||
|
"linux/amd64") echo "65d0d0f353d2ff9d0af202b268b4bf53a9948a5007650854855c729289085739" ;; \
|
||||||
|
"linux/arm64") echo "3fbd14201473710a592b2189e81f00f3c8998e96d34f16bd2429c35d1bc36d00" ;; \
|
||||||
|
*) echo "" ;; esac) && \
|
||||||
|
export S6_DOWNLOAD_ARCH=$(case ${TARGETPLATFORM:-linux/amd64} in \
|
||||||
|
"linux/amd64") echo "https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-x86_64.tar.xz" ;; \
|
||||||
|
"linux/arm64") echo "https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-aarch64.tar.xz" ;; \
|
||||||
|
*) echo "" ;; esac) && \
|
||||||
|
export FFMPEG_EXPECTED_SHA256=$(case ${TARGETPLATFORM:-linux/amd64} in \
|
||||||
|
"linux/amd64") echo "36bac8c527bf390603416f749ab0dd860142b0a66f0865b67366062a9c286c8b" ;; \
|
||||||
|
"linux/arm64") echo "8f36e45d99d2367a5c0c220ee3164fa48f4f0cec35f78204ccced8dc303bfbdc" ;; \
|
||||||
|
*) echo "" ;; esac) && \
|
||||||
|
export FFMPEG_DOWNLOAD=$(case ${TARGETPLATFORM:-linux/amd64} in \
|
||||||
|
"linux/amd64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/${FFMPEG_DATE}/ffmpeg-N-${FFMPEG_VERSION}-linux64-gpl.tar.xz" ;; \
|
||||||
|
"linux/arm64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/${FFMPEG_DATE}/ffmpeg-N-${FFMPEG_VERSION}-linuxarm64-gpl.tar.xz" ;; \
|
||||||
|
*) echo "" ;; esac) && \
|
||||||
|
export S6_NOARCH_EXPECTED_SHA256="fd80c231e8ae1a0667b7ae2078b9ad0e1269c4d117bf447a4506815a700dbff3" && \
|
||||||
|
export S6_DOWNLOAD_NOARCH="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-noarch.tar.xz" && \
|
||||||
|
echo "Building for arch: ${ARCH}|${ARCH44}, downloading S6 from: ${S6_DOWNLOAD}}, expecting S6 SHA256: ${S6_EXPECTED_SHA256}" && \
|
||||||
|
set -x && \
|
||||||
apt-get update && \
|
apt-get update && \
|
||||||
apt-get -y --no-install-recommends install locales && \
|
apt-get -y --no-install-recommends install locales && \
|
||||||
echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && \
|
echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && \
|
||||||
locale-gen en_US.UTF-8 && \
|
locale-gen en_US.UTF-8 && \
|
||||||
# Install required distro packages
|
# Install required distro packages
|
||||||
apt-get -y --no-install-recommends install curl xz-utils ca-certificates binutils && \
|
apt-get -y --no-install-recommends install curl ca-certificates binutils xz-utils && \
|
||||||
# Install s6
|
# Install s6
|
||||||
curl -L ${S6_DOWNLOAD} --output /tmp/s6-overlay-${ARCH}.tar.gz && \
|
curl -L ${S6_DOWNLOAD_NOARCH} --output /tmp/s6-overlay-noarch.tar.xz && \
|
||||||
sha256sum /tmp/s6-overlay-${ARCH}.tar.gz && \
|
echo "${S6_NOARCH_EXPECTED_SHA256} /tmp/s6-overlay-noarch.tar.xz" | sha256sum -c - && \
|
||||||
echo "${S6_EXPECTED_SHA256} /tmp/s6-overlay-${ARCH}.tar.gz" | sha256sum -c - && \
|
tar -C / -Jxpf /tmp/s6-overlay-noarch.tar.xz && \
|
||||||
tar xzf /tmp/s6-overlay-${ARCH}.tar.gz -C / && \
|
curl -L ${S6_DOWNLOAD_ARCH} --output /tmp/s6-overlay-${ARCH}.tar.xz && \
|
||||||
|
echo "${S6_ARCH_EXPECTED_SHA256} /tmp/s6-overlay-${ARCH}.tar.xz" | sha256sum -c - && \
|
||||||
|
tar -C / -Jxpf /tmp/s6-overlay-${ARCH}.tar.xz && \
|
||||||
# Install ffmpeg
|
# Install ffmpeg
|
||||||
curl -L ${FFMPEG_DOWNLOAD} --output /tmp/ffmpeg-${ARCH}-static.tar.xz && \
|
echo "Building for arch: ${ARCH}|${ARCH44}, downloading FFMPEG from: ${FFMPEG_DOWNLOAD}, expecting FFMPEG SHA256: ${FFMPEG_EXPECTED_SHA256}" && \
|
||||||
echo "${FFMPEG_EXPECTED_SHA256} /tmp/ffmpeg-${ARCH}-static.tar.xz" | sha256sum -c - && \
|
curl -L ${FFMPEG_DOWNLOAD} --output /tmp/ffmpeg-${ARCH}.tar.xz && \
|
||||||
xz --decompress /tmp/ffmpeg-${ARCH}-static.tar.xz && \
|
sha256sum /tmp/ffmpeg-${ARCH}.tar.xz && \
|
||||||
tar -xvf /tmp/ffmpeg-${ARCH}-static.tar -C /tmp && \
|
echo "${FFMPEG_EXPECTED_SHA256} /tmp/ffmpeg-${ARCH}.tar.xz" | sha256sum -c - && \
|
||||||
install -v -s -g root -o root -m 0755 -s /tmp/ffmpeg-${FFMPEG_VERSION}-${ARCH}-static/ffmpeg -t /usr/local/bin && \
|
tar -xf /tmp/ffmpeg-${ARCH}.tar.xz --strip-components=2 --no-anchored -C /usr/local/bin/ "ffmpeg" && \
|
||||||
|
tar -xf /tmp/ffmpeg-${ARCH}.tar.xz --strip-components=2 --no-anchored -C /usr/local/bin/ "ffprobe" && \
|
||||||
# Clean up
|
# Clean up
|
||||||
rm -rf /tmp/s6-overlay-${ARCH}.tar.gz && \
|
rm -rf /tmp/s6-overlay-${ARCH}.tar.gz && \
|
||||||
rm -rf /tmp/ffmpeg-${ARCH}-static.tar && \
|
rm -rf /tmp/ffmpeg-${ARCH}.tar.xz && \
|
||||||
rm -rf /tmp/ffmpeg-${FFMPEG_VERSION}-${ARCH}-static && \
|
apt-get -y autoremove --purge curl binutils xz-utils
|
||||||
apt-get -y autoremove --purge curl xz-utils binutils
|
|
||||||
|
|
||||||
# Copy app
|
# Copy app
|
||||||
COPY tubesync /app
|
COPY tubesync /app
|
||||||
COPY tubesync/tubesync/local_settings.py.container /app/tubesync/local_settings.py
|
COPY tubesync/tubesync/local_settings.py.container /app/tubesync/local_settings.py
|
||||||
|
|
||||||
# Append container bundled software versions
|
# Copy over pip.conf to use piwheels
|
||||||
RUN echo "ffmpeg_version = '${FFMPEG_VERSION}-static'" >> /app/common/third_party_versions.py
|
COPY pip.conf /etc/pip.conf
|
||||||
|
|
||||||
# Add Pipfile
|
# Add Pipfile
|
||||||
COPY Pipfile /app/Pipfile
|
COPY Pipfile /app/Pipfile
|
||||||
COPY Pipfile.lock /app/Pipfile.lock
|
|
||||||
|
|
||||||
# Switch workdir to the the app
|
# Switch workdir to the the app
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
# Set up the app
|
# Set up the app
|
||||||
RUN set -x && \
|
RUN set -x && \
|
||||||
|
apt-get update && \
|
||||||
# Install required distro packages
|
# Install required distro packages
|
||||||
apt-get -y install nginx-light && \
|
apt-get -y install nginx-light && \
|
||||||
apt-get -y --no-install-recommends install python3 python3-setuptools python3-pip python3-dev gcc make && \
|
apt-get -y --no-install-recommends install \
|
||||||
# Install pipenv
|
python3 \
|
||||||
pip3 --disable-pip-version-check install pipenv && \
|
python3-dev \
|
||||||
|
python3-pip \
|
||||||
|
python3-wheel \
|
||||||
|
pipenv \
|
||||||
|
gcc \
|
||||||
|
g++ \
|
||||||
|
make \
|
||||||
|
pkgconf \
|
||||||
|
default-libmysqlclient-dev \
|
||||||
|
libmariadb3 \
|
||||||
|
postgresql-common \
|
||||||
|
libpq-dev \
|
||||||
|
libpq5 \
|
||||||
|
libjpeg62-turbo \
|
||||||
|
libwebp7 \
|
||||||
|
libjpeg-dev \
|
||||||
|
zlib1g-dev \
|
||||||
|
libwebp-dev \
|
||||||
|
redis-server && \
|
||||||
# Create a 'app' user which the application will run as
|
# Create a 'app' user which the application will run as
|
||||||
groupadd app && \
|
groupadd app && \
|
||||||
useradd -M -d /app -s /bin/false -g app app && \
|
useradd -M -d /app -s /bin/false -g app app && \
|
||||||
# Install non-distro packages
|
# Install non-distro packages
|
||||||
pipenv install --system && \
|
PIPENV_VERBOSITY=64 pipenv install --system --skip-lock && \
|
||||||
# Make absolutely sure we didn't accidentally bundle a SQLite dev database
|
# Make absolutely sure we didn't accidentally bundle a SQLite dev database
|
||||||
rm -rf /app/db.sqlite3 && \
|
rm -rf /app/db.sqlite3 && \
|
||||||
# Run any required app commands
|
# Run any required app commands
|
||||||
|
@ -79,10 +119,19 @@ RUN set -x && \
|
||||||
mkdir -p /downloads/video && \
|
mkdir -p /downloads/video && \
|
||||||
# Clean up
|
# Clean up
|
||||||
rm /app/Pipfile && \
|
rm /app/Pipfile && \
|
||||||
rm /app/Pipfile.lock && \
|
|
||||||
pipenv --clear && \
|
pipenv --clear && \
|
||||||
pip3 --disable-pip-version-check uninstall -y pipenv wheel virtualenv && \
|
apt-get -y autoremove --purge \
|
||||||
apt-get -y autoremove --purge python3-pip python3-dev gcc make && \
|
python3-pip \
|
||||||
|
python3-dev \
|
||||||
|
gcc \
|
||||||
|
g++ \
|
||||||
|
make \
|
||||||
|
default-libmysqlclient-dev \
|
||||||
|
postgresql-common \
|
||||||
|
libpq-dev \
|
||||||
|
libjpeg-dev \
|
||||||
|
zlib1g-dev \
|
||||||
|
libwebp-dev && \
|
||||||
apt-get -y autoremove && \
|
apt-get -y autoremove && \
|
||||||
apt-get -y autoclean && \
|
apt-get -y autoclean && \
|
||||||
rm -rf /var/lib/apt/lists/* && \
|
rm -rf /var/lib/apt/lists/* && \
|
||||||
|
@ -92,7 +141,12 @@ RUN set -x && \
|
||||||
rm -rf /root && \
|
rm -rf /root && \
|
||||||
mkdir -p /root && \
|
mkdir -p /root && \
|
||||||
chown root:root /root && \
|
chown root:root /root && \
|
||||||
chmod 0700 /root
|
chmod 0755 /root
|
||||||
|
|
||||||
|
# Append software versions
|
||||||
|
RUN set -x && \
|
||||||
|
FFMPEG_VERSION=$(/usr/local/bin/ffmpeg -version | head -n 1 | awk '{ print $3 }') && \
|
||||||
|
echo "ffmpeg_version = '${FFMPEG_VERSION}'" >> /app/common/third_party_versions.py
|
||||||
|
|
||||||
# Copy root
|
# Copy root
|
||||||
COPY config/root /
|
COPY config/root /
|
||||||
|
@ -102,7 +156,7 @@ HEALTHCHECK --interval=1m --timeout=10s CMD /app/healthcheck.py http://127.0.0.1
|
||||||
|
|
||||||
# ENVS and ports
|
# ENVS and ports
|
||||||
ENV PYTHONPATH "/app:${PYTHONPATH}"
|
ENV PYTHONPATH "/app:${PYTHONPATH}"
|
||||||
EXPOSE 8080
|
EXPOSE 4848
|
||||||
|
|
||||||
# Volumes
|
# Volumes
|
||||||
VOLUME ["/config", "/downloads"]
|
VOLUME ["/config", "/downloads"]
|
||||||
|
|
22
Makefile
22
Makefile
|
@ -8,17 +8,17 @@ all: clean build
|
||||||
|
|
||||||
|
|
||||||
dev:
|
dev:
|
||||||
$(python) app/manage.py runserver
|
$(python) tubesync/manage.py runserver
|
||||||
|
|
||||||
|
|
||||||
build:
|
build:
|
||||||
mkdir -p app/media
|
mkdir -p tubesync/media
|
||||||
mkdir -p app/static
|
mkdir -p tubesync/static
|
||||||
$(python) app/manage.py collectstatic --noinput
|
$(python) tubesync/manage.py collectstatic --noinput
|
||||||
|
|
||||||
|
|
||||||
clean:
|
clean:
|
||||||
rm -rf app/static
|
rm -rf tubesync/static
|
||||||
|
|
||||||
|
|
||||||
container: clean
|
container: clean
|
||||||
|
@ -29,5 +29,13 @@ runcontainer:
|
||||||
$(docker) run --rm --name $(name) --env-file dev.env --log-opt max-size=50m -ti -p 4848:4848 $(image)
|
$(docker) run --rm --name $(name) --env-file dev.env --log-opt max-size=50m -ti -p 4848:4848 $(image)
|
||||||
|
|
||||||
|
|
||||||
test:
|
stopcontainer:
|
||||||
$(python) app/manage.py test --verbosity=2
|
$(docker) stop $(name)
|
||||||
|
|
||||||
|
|
||||||
|
test: build
|
||||||
|
cd tubesync && $(python) manage.py test --verbosity=2 && cd ..
|
||||||
|
|
||||||
|
|
||||||
|
shell:
|
||||||
|
cd tubesync && $(python) manage.py shell
|
||||||
|
|
15
Pipfile
15
Pipfile
|
@ -4,9 +4,10 @@ url = "https://pypi.org/simple"
|
||||||
verify_ssl = true
|
verify_ssl = true
|
||||||
|
|
||||||
[dev-packages]
|
[dev-packages]
|
||||||
|
autopep8 = "*"
|
||||||
|
|
||||||
[packages]
|
[packages]
|
||||||
django = "*"
|
django = "~=3.2"
|
||||||
django-sass-processor = "*"
|
django-sass-processor = "*"
|
||||||
libsass = "*"
|
libsass = "*"
|
||||||
pillow = "*"
|
pillow = "*"
|
||||||
|
@ -14,9 +15,11 @@ whitenoise = "*"
|
||||||
gunicorn = "*"
|
gunicorn = "*"
|
||||||
django-compressor = "*"
|
django-compressor = "*"
|
||||||
httptools = "*"
|
httptools = "*"
|
||||||
youtube-dl = "*"
|
|
||||||
django-background-tasks = "*"
|
django-background-tasks = "*"
|
||||||
requests = "*"
|
django-basicauth = "*"
|
||||||
|
psycopg2-binary = "*"
|
||||||
[requires]
|
mysqlclient = "*"
|
||||||
python_version = "3"
|
yt-dlp = "*"
|
||||||
|
redis = "*"
|
||||||
|
hiredis = "*"
|
||||||
|
requests = {extras = ["socks"], version = "*"}
|
||||||
|
|
|
@ -1,247 +0,0 @@
|
||||||
{
|
|
||||||
"_meta": {
|
|
||||||
"hash": {
|
|
||||||
"sha256": "a4bb556fc61ee4583f9588980450b071814298ee4d1a1023fad149c14d14aaba"
|
|
||||||
},
|
|
||||||
"pipfile-spec": 6,
|
|
||||||
"requires": {
|
|
||||||
"python_version": "3"
|
|
||||||
},
|
|
||||||
"sources": [
|
|
||||||
{
|
|
||||||
"name": "pypi",
|
|
||||||
"url": "https://pypi.org/simple",
|
|
||||||
"verify_ssl": true
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"default": {
|
|
||||||
"asgiref": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:5ee950735509d04eb673bd7f7120f8fa1c9e2df495394992c73234d526907e17",
|
|
||||||
"sha256:7162a3cb30ab0609f1a4c95938fd73e8604f63bdba516a7f7d64b83ff09478f0"
|
|
||||||
],
|
|
||||||
"version": "==3.3.1"
|
|
||||||
},
|
|
||||||
"certifi": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c",
|
|
||||||
"sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"
|
|
||||||
],
|
|
||||||
"version": "==2020.12.5"
|
|
||||||
},
|
|
||||||
"chardet": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa",
|
|
||||||
"sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"
|
|
||||||
],
|
|
||||||
"version": "==4.0.0"
|
|
||||||
},
|
|
||||||
"django": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:5c866205f15e7a7123f1eec6ab939d22d5bde1416635cab259684af66d8e48a2",
|
|
||||||
"sha256:edb10b5c45e7e9c0fb1dc00b76ec7449aca258a39ffd613dbd078c51d19c9f03"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==3.1.4"
|
|
||||||
},
|
|
||||||
"django-appconf": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:1b1d0e1069c843ebe8ae5aa48ec52403b1440402b320c3e3a206a0907e97bb06",
|
|
||||||
"sha256:be58deb54a43d77d2e1621fe59f787681376d3cd0b8bd8e4758ef6c3a6453380"
|
|
||||||
],
|
|
||||||
"version": "==1.0.4"
|
|
||||||
},
|
|
||||||
"django-background-tasks": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:e1b19e8d495a276c9d64c5a1ff8b41132f75d2f58e45be71b78650dad59af9de"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==1.2.5"
|
|
||||||
},
|
|
||||||
"django-compat": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:3ac9a3bedc56b9365d9eb241bc5157d0c193769bf995f9a78dc1bc24e7c2331b"
|
|
||||||
],
|
|
||||||
"version": "==1.0.15"
|
|
||||||
},
|
|
||||||
"django-compressor": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:57ac0a696d061e5fc6fbc55381d2050f353b973fb97eee5593f39247bc0f30af",
|
|
||||||
"sha256:d2ed1c6137ddaac5536233ec0a819e14009553fee0a869bea65d03e5285ba74f"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==2.4"
|
|
||||||
},
|
|
||||||
"django-sass-processor": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:9b46a12ca8bdcb397d46fbcc49e6a926ff9f76a93c5efeb23b495419fd01fc7a"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==0.8.2"
|
|
||||||
},
|
|
||||||
"gunicorn": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626",
|
|
||||||
"sha256:cd4a810dd51bf497552cf3f863b575dabd73d6ad6a91075b65936b151cbf4f9c"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==20.0.4"
|
|
||||||
},
|
|
||||||
"httptools": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:0a4b1b2012b28e68306575ad14ad5e9120b34fccd02a81eb08838d7e3bbb48be",
|
|
||||||
"sha256:3592e854424ec94bd17dc3e0c96a64e459ec4147e6d53c0a42d0ebcef9cb9c5d",
|
|
||||||
"sha256:41b573cf33f64a8f8f3400d0a7faf48e1888582b6f6e02b82b9bd4f0bf7497ce",
|
|
||||||
"sha256:56b6393c6ac7abe632f2294da53f30d279130a92e8ae39d8d14ee2e1b05ad1f2",
|
|
||||||
"sha256:86c6acd66765a934e8730bf0e9dfaac6fdcf2a4334212bd4a0a1c78f16475ca6",
|
|
||||||
"sha256:96da81e1992be8ac2fd5597bf0283d832287e20cb3cfde8996d2b00356d4e17f",
|
|
||||||
"sha256:96eb359252aeed57ea5c7b3d79839aaa0382c9d3149f7d24dd7172b1bcecb009",
|
|
||||||
"sha256:a2719e1d7a84bb131c4f1e0cb79705034b48de6ae486eb5297a139d6a3296dce",
|
|
||||||
"sha256:ac0aa11e99454b6a66989aa2d44bca41d4e0f968e395a0a8f164b401fefe359a",
|
|
||||||
"sha256:bc3114b9edbca5a1eb7ae7db698c669eb53eb8afbbebdde116c174925260849c",
|
|
||||||
"sha256:fa3cd71e31436911a44620473e873a256851e1f53dee56669dae403ba41756a4",
|
|
||||||
"sha256:fea04e126014169384dee76a153d4573d90d0cbd1d12185da089f73c78390437"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==0.1.1"
|
|
||||||
},
|
|
||||||
"idna": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
|
|
||||||
"sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
|
|
||||||
],
|
|
||||||
"version": "==2.10"
|
|
||||||
},
|
|
||||||
"libsass": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:1521d2a8d4b397c6ec90640a1f6b5529077035efc48ef1c2e53095544e713d1b",
|
|
||||||
"sha256:1b2d415bbf6fa7da33ef46e549db1418498267b459978eff8357e5e823962d35",
|
|
||||||
"sha256:25ebc2085f5eee574761ccc8d9cd29a9b436fc970546d5ef08c6fa41eb57dff1",
|
|
||||||
"sha256:2ae806427b28bc1bb7cb0258666d854fcf92ba52a04656b0b17ba5e190fb48a9",
|
|
||||||
"sha256:4a246e4b88fd279abef8b669206228c92534d96ddcd0770d7012088c408dff23",
|
|
||||||
"sha256:553e5096414a8d4fb48d0a48f5a038d3411abe254d79deac5e008516c019e63a",
|
|
||||||
"sha256:697f0f9fa8a1367ca9ec6869437cb235b1c537fc8519983d1d890178614a8903",
|
|
||||||
"sha256:a8fd4af9f853e8bf42b1425c5e48dd90b504fa2e70d7dac5ac80b8c0a5a5fe85",
|
|
||||||
"sha256:c9411fec76f480ffbacc97d8188322e02a5abca6fc78e70b86a2a2b421eae8a2",
|
|
||||||
"sha256:daa98a51086d92aa7e9c8871cf1a8258124b90e2abf4697852a3dca619838618",
|
|
||||||
"sha256:e0e60836eccbf2d9e24ec978a805cd6642fa92515fbd95e3493fee276af76f8a",
|
|
||||||
"sha256:e64ae2587f1a683e831409aad03ba547c245ef997e1329fffadf7a866d2510b8",
|
|
||||||
"sha256:f6852828e9e104d2ce0358b73c550d26dd86cc3a69439438c3b618811b9584f5"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==0.20.1"
|
|
||||||
},
|
|
||||||
"pillow": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:006de60d7580d81f4a1a7e9f0173dc90a932e3905cc4d47ea909bc946302311a",
|
|
||||||
"sha256:0a2e8d03787ec7ad71dc18aec9367c946ef8ef50e1e78c71f743bc3a770f9fae",
|
|
||||||
"sha256:0eeeae397e5a79dc088d8297a4c2c6f901f8fb30db47795113a4a605d0f1e5ce",
|
|
||||||
"sha256:11c5c6e9b02c9dac08af04f093eb5a2f84857df70a7d4a6a6ad461aca803fb9e",
|
|
||||||
"sha256:2fb113757a369a6cdb189f8df3226e995acfed0a8919a72416626af1a0a71140",
|
|
||||||
"sha256:4b0ef2470c4979e345e4e0cc1bbac65fda11d0d7b789dbac035e4c6ce3f98adb",
|
|
||||||
"sha256:59e903ca800c8cfd1ebe482349ec7c35687b95e98cefae213e271c8c7fffa021",
|
|
||||||
"sha256:5abd653a23c35d980b332bc0431d39663b1709d64142e3652890df4c9b6970f6",
|
|
||||||
"sha256:5f9403af9c790cc18411ea398a6950ee2def2a830ad0cfe6dc9122e6d528b302",
|
|
||||||
"sha256:6b4a8fd632b4ebee28282a9fef4c341835a1aa8671e2770b6f89adc8e8c2703c",
|
|
||||||
"sha256:6c1aca8231625115104a06e4389fcd9ec88f0c9befbabd80dc206c35561be271",
|
|
||||||
"sha256:795e91a60f291e75de2e20e6bdd67770f793c8605b553cb6e4387ce0cb302e09",
|
|
||||||
"sha256:7ba0ba61252ab23052e642abdb17fd08fdcfdbbf3b74c969a30c58ac1ade7cd3",
|
|
||||||
"sha256:7c9401e68730d6c4245b8e361d3d13e1035cbc94db86b49dc7da8bec235d0015",
|
|
||||||
"sha256:81f812d8f5e8a09b246515fac141e9d10113229bc33ea073fec11403b016bcf3",
|
|
||||||
"sha256:895d54c0ddc78a478c80f9c438579ac15f3e27bf442c2a9aa74d41d0e4d12544",
|
|
||||||
"sha256:8de332053707c80963b589b22f8e0229f1be1f3ca862a932c1bcd48dafb18dd8",
|
|
||||||
"sha256:92c882b70a40c79de9f5294dc99390671e07fc0b0113d472cbea3fde15db1792",
|
|
||||||
"sha256:95edb1ed513e68bddc2aee3de66ceaf743590bf16c023fb9977adc4be15bd3f0",
|
|
||||||
"sha256:b63d4ff734263ae4ce6593798bcfee6dbfb00523c82753a3a03cbc05555a9cc3",
|
|
||||||
"sha256:bd7bf289e05470b1bc74889d1466d9ad4a56d201f24397557b6f65c24a6844b8",
|
|
||||||
"sha256:cc3ea6b23954da84dbee8025c616040d9aa5eaf34ea6895a0a762ee9d3e12e11",
|
|
||||||
"sha256:cc9ec588c6ef3a1325fa032ec14d97b7309db493782ea8c304666fb10c3bd9a7",
|
|
||||||
"sha256:d3d07c86d4efa1facdf32aa878bd508c0dc4f87c48125cc16b937baa4e5b5e11",
|
|
||||||
"sha256:d8a96747df78cda35980905bf26e72960cba6d355ace4780d4bdde3b217cdf1e",
|
|
||||||
"sha256:e38d58d9138ef972fceb7aeec4be02e3f01d383723965bfcef14d174c8ccd039",
|
|
||||||
"sha256:eb472586374dc66b31e36e14720747595c2b265ae962987261f044e5cce644b5",
|
|
||||||
"sha256:fbd922f702582cb0d71ef94442bfca57624352622d75e3be7a1e7e9360b07e72"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==8.0.1"
|
|
||||||
},
|
|
||||||
"pytz": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268",
|
|
||||||
"sha256:5c55e189b682d420be27c6995ba6edce0c0a77dd67bfbe2ae6607134d5851ffd"
|
|
||||||
],
|
|
||||||
"version": "==2020.4"
|
|
||||||
},
|
|
||||||
"rcssmin": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:ca87b695d3d7864157773a61263e5abb96006e9ff0e021eff90cbe0e1ba18270"
|
|
||||||
],
|
|
||||||
"version": "==1.0.6"
|
|
||||||
},
|
|
||||||
"requests": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804",
|
|
||||||
"sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==2.25.1"
|
|
||||||
},
|
|
||||||
"rjsmin": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:0ab825839125eaca57cc59581d72e596e58a7a56fbc0839996b7528f0343a0a8",
|
|
||||||
"sha256:211c2fe8298951663bbc02acdffbf714f6793df54bfc50e1c6c9e71b3f2559a3",
|
|
||||||
"sha256:466fe70cc5647c7c51b3260c7e2e323a98b2b173564247f9c89e977720a0645f",
|
|
||||||
"sha256:585e75a84d9199b68056fd4a083d9a61e2a92dfd10ff6d4ce5bdb04bc3bdbfaf",
|
|
||||||
"sha256:6044ca86e917cd5bb2f95e6679a4192cef812122f28ee08c677513de019629b3",
|
|
||||||
"sha256:714329db774a90947e0e2086cdddb80d5e8c4ac1c70c9f92436378dedb8ae345",
|
|
||||||
"sha256:799890bd07a048892d8d3deb9042dbc20b7f5d0eb7da91e9483c561033b23ce2",
|
|
||||||
"sha256:975b69754d6a76be47c0bead12367a1ca9220d08e5393f80bab0230d4625d1f4",
|
|
||||||
"sha256:b15dc75c71f65d9493a8c7fa233fdcec823e3f1b88ad84a843ffef49b338ac32",
|
|
||||||
"sha256:dd0f4819df4243ffe4c964995794c79ca43943b5b756de84be92b445a652fb86",
|
|
||||||
"sha256:e3908b21ebb584ce74a6ac233bdb5f29485752c9d3be5e50c5484ed74169232c",
|
|
||||||
"sha256:e487a7783ac4339e79ec610b98228eb9ac72178973e3dee16eba0e3feef25924",
|
|
||||||
"sha256:ecd29f1b3e66a4c0753105baec262b331bcbceefc22fbe6f7e8bcd2067bcb4d7"
|
|
||||||
],
|
|
||||||
"version": "==1.1.0"
|
|
||||||
},
|
|
||||||
"six": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
|
|
||||||
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
|
|
||||||
],
|
|
||||||
"version": "==1.15.0"
|
|
||||||
},
|
|
||||||
"sqlparse": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:017cde379adbd6a1f15a61873f43e8274179378e95ef3fede90b5aa64d304ed0",
|
|
||||||
"sha256:0f91fd2e829c44362cbcfab3e9ae12e22badaa8a29ad5ff599f9ec109f0454e8"
|
|
||||||
],
|
|
||||||
"version": "==0.4.1"
|
|
||||||
},
|
|
||||||
"urllib3": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08",
|
|
||||||
"sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"
|
|
||||||
],
|
|
||||||
"version": "==1.26.2"
|
|
||||||
},
|
|
||||||
"whitenoise": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:05ce0be39ad85740a78750c86a93485c40f08ad8c62a6006de0233765996e5c7",
|
|
||||||
"sha256:05d00198c777028d72d8b0bbd234db605ef6d60e9410125124002518a48e515d"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==5.2.0"
|
|
||||||
},
|
|
||||||
"youtube-dl": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:65968065e66966955dc79fad9251565fcc982566118756da624bd21467f3a04c",
|
|
||||||
"sha256:eaa859f15b6897bec21474b7787dc958118c8088e1f24d4ef1d58eab13188958"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==2020.12.14"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"develop": {}
|
|
||||||
}
|
|
118
README.md
118
README.md
|
@ -9,10 +9,10 @@ downloaded.
|
||||||
|
|
||||||
If you want to watch YouTube videos in particular quality or settings from your local
|
If you want to watch YouTube videos in particular quality or settings from your local
|
||||||
media server, then TubeSync is for you. Internally, TubeSync is a web interface wrapper
|
media server, then TubeSync is for you. Internally, TubeSync is a web interface wrapper
|
||||||
on `youtube-dl` and `ffmpeg` with a task scheduler.
|
on `yt-dlp` and `ffmpeg` with a task scheduler.
|
||||||
|
|
||||||
There are several other web interfaces to YouTube and `youtube-dl` all with varying
|
There are several other web interfaces to YouTube and `yt-dlp` all with varying
|
||||||
features and implemenations. TubeSync's largest difference is full PVR experience of
|
features and implementations. TubeSync's largest difference is full PVR experience of
|
||||||
updating media servers and better selection of media formats. Additionally, to be as
|
updating media servers and better selection of media formats. Additionally, to be as
|
||||||
hands-free as possible, TubeSync has gradual retrying of failures with back-off timers
|
hands-free as possible, TubeSync has gradual retrying of failures with back-off timers
|
||||||
so media which fails to download will be retried for an extended period making it,
|
so media which fails to download will be retried for an extended period making it,
|
||||||
|
@ -22,12 +22,9 @@ hopefully, quite reliable.
|
||||||
# Latest container image
|
# Latest container image
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
ghcr.io/meeb/tubesync:v0.7
|
ghcr.io/meeb/tubesync:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
**NOTE: the `:latest` tag does exist, but will contain in-development commits and may
|
|
||||||
be broken. Use at your own risk.**
|
|
||||||
|
|
||||||
# Screenshots
|
# Screenshots
|
||||||
|
|
||||||
### Dashboard
|
### Dashboard
|
||||||
|
@ -72,11 +69,12 @@ currently just Plex, to complete the PVR experience.
|
||||||
# Installation
|
# Installation
|
||||||
|
|
||||||
TubeSync is designed to be run in a container, such as via Docker or Podman. It also
|
TubeSync is designed to be run in a container, such as via Docker or Podman. It also
|
||||||
works in a Docker Compose stack. Only `amd64` is initially supported.
|
works in a Docker Compose stack. `amd64` (most desktop PCs and servers) and `arm64`
|
||||||
|
(modern ARM computers, such as the Rasperry Pi 3 or later) are supported.
|
||||||
|
|
||||||
Example (with Docker on *nix):
|
Example (with Docker on *nix):
|
||||||
|
|
||||||
First find your the user ID and group ID you want to run TubeSync as, if you're not
|
First find the user ID and group ID you want to run TubeSync as, if you're not
|
||||||
sure what this is it's probably your current user ID and group ID:
|
sure what this is it's probably your current user ID and group ID:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
@ -101,8 +99,8 @@ $ mkdir /some/directory/tubesync-downloads
|
||||||
Finally, download and run the container:
|
Finally, download and run the container:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Pull a versioned image
|
# Pull image
|
||||||
$ docker pull ghcr.io/meeb/tubesync:v0.7
|
$ docker pull ghcr.io/meeb/tubesync:latest
|
||||||
# Start the container using your user ID and group ID
|
# Start the container using your user ID and group ID
|
||||||
$ docker run \
|
$ docker run \
|
||||||
-d \
|
-d \
|
||||||
|
@ -113,19 +111,21 @@ $ docker run \
|
||||||
-v /some/directory/tubesync-config:/config \
|
-v /some/directory/tubesync-config:/config \
|
||||||
-v /some/directory/tubesync-downloads:/downloads \
|
-v /some/directory/tubesync-downloads:/downloads \
|
||||||
-p 4848:4848 \
|
-p 4848:4848 \
|
||||||
ghcr.io/meeb/tubesync:v0.7
|
ghcr.io/meeb/tubesync:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
Once running, open `http://localhost:4848` in your browser and you should see the
|
Once running, open `http://localhost:4848` in your browser and you should see the
|
||||||
TubeSync dashboard. If you do, you can proceed to adding some sources (YouTube channels
|
TubeSync dashboard. If you do, you can proceed to adding some sources (YouTube channels
|
||||||
and playlists). If not, check `docker logs tubesync` to see what errors might be
|
and playlists). If not, check `docker logs tubesync` to see what errors might be
|
||||||
occuring, typical ones are file permission issues.
|
occurring, typical ones are file permission issues.
|
||||||
|
|
||||||
Alternatively, for Docker Compose, you can use something like:
|
Alternatively, for Docker Compose, you can use something like:
|
||||||
|
|
||||||
```yaml
|
```yml
|
||||||
|
version: '3.7'
|
||||||
|
services:
|
||||||
tubesync:
|
tubesync:
|
||||||
image: ghcr.io/meeb/tubesync:v0.7
|
image: ghcr.io/meeb/tubesync:latest
|
||||||
container_name: tubesync
|
container_name: tubesync
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
ports:
|
ports:
|
||||||
|
@ -139,6 +139,41 @@ Alternatively, for Docker Compose, you can use something like:
|
||||||
- PGID=1000
|
- PGID=1000
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Optional authentication
|
||||||
|
|
||||||
|
Available in `v1.0` (or `:latest`)and later. If you want to enable a basic username and
|
||||||
|
password to be required to access the TubeSync dashboard you can set them with the
|
||||||
|
following environment variables:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
HTTP_USER
|
||||||
|
HTTP_PASS
|
||||||
|
```
|
||||||
|
|
||||||
|
For example, in the `docker run ...` line add in:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
...
|
||||||
|
-e HTTP_USER=some-username \
|
||||||
|
-e HTTP_PASS=some-secure-password \
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
Or in your Docker Compose file you would add in:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
...
|
||||||
|
environment:
|
||||||
|
- HTTP_USER=some-username
|
||||||
|
- HTTP_PASS=some-secure-password
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
When BOTH `HTTP_USER` and `HTTP_PASS` are set then basic HTTP authentication will be
|
||||||
|
enabled.
|
||||||
|
|
||||||
|
|
||||||
# Updating
|
# Updating
|
||||||
|
|
||||||
To update, you can just pull a new version of the container image as they are released.
|
To update, you can just pull a new version of the container image as they are released.
|
||||||
|
@ -196,6 +231,19 @@ $ docker logs --follow tubesync
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
# Advanced usage guides
|
||||||
|
|
||||||
|
Once you're happy using TubeSync there are some advanced usage guides for more complex
|
||||||
|
and less common features:
|
||||||
|
|
||||||
|
* [Import existing media into TubeSync](https://github.com/meeb/tubesync/blob/main/docs/import-existing-media.md)
|
||||||
|
* [Sync or create missing metadata files](https://github.com/meeb/tubesync/blob/main/docs/create-missing-metadata.md)
|
||||||
|
* [Reset tasks from the command line](https://github.com/meeb/tubesync/blob/main/docs/reset-tasks.md)
|
||||||
|
* [Using PostgreSQL, MySQL or MariaDB as database backends](https://github.com/meeb/tubesync/blob/main/docs/other-database-backends.md)
|
||||||
|
* [Using cookies](https://github.com/meeb/tubesync/blob/main/docs/using-cookies.md)
|
||||||
|
* [Reset metadata](https://github.com/meeb/tubesync/blob/main/docs/reset-metadata.md)
|
||||||
|
|
||||||
|
|
||||||
# Warnings
|
# Warnings
|
||||||
|
|
||||||
### 1. Index frequency
|
### 1. Index frequency
|
||||||
|
@ -235,7 +283,7 @@ automatically.
|
||||||
### Does TubeSync support any other video platforms?
|
### Does TubeSync support any other video platforms?
|
||||||
|
|
||||||
At the moment, no. This is a pre-release. The library TubeSync uses that does most
|
At the moment, no. This is a pre-release. The library TubeSync uses that does most
|
||||||
of the downloading work, `youtube-dl`, supports many hundreds of video sources so it's
|
of the downloading work, `yt-dlp`, supports many hundreds of video sources so it's
|
||||||
likely more will be added to TubeSync if there is demand for it.
|
likely more will be added to TubeSync if there is demand for it.
|
||||||
|
|
||||||
### Is there a progress bar?
|
### Is there a progress bar?
|
||||||
|
@ -247,27 +295,27 @@ your install is doing check the container logs.
|
||||||
|
|
||||||
### Are there alerts when a download is complete?
|
### Are there alerts when a download is complete?
|
||||||
|
|
||||||
No, this feature is best served by existing services such as the execelent
|
No, this feature is best served by existing services such as the excellent
|
||||||
[Tautulli](https://tautulli.com/) which can monitor your Plex server and send alerts
|
[Tautulli](https://tautulli.com/) which can monitor your Plex server and send alerts
|
||||||
that way.
|
that way.
|
||||||
|
|
||||||
### There's errors in my "tasks" tab!
|
### There are errors in my "tasks" tab!
|
||||||
|
|
||||||
You only really need to worry about these if there is a permanent failure. Some errors
|
You only really need to worry about these if there is a permanent failure. Some errors
|
||||||
are temproary and will be retried for you automatically, such as a download got
|
are temporary and will be retried for you automatically, such as a download got
|
||||||
interrupted and will be tried again later. Sources with permanet errors (such as no
|
interrupted and will be tried again later. Sources with permanent errors (such as no
|
||||||
media available because you got a channel name wrong) will be shown as errors on the
|
media available because you got a channel name wrong) will be shown as errors on the
|
||||||
"sources" tab.
|
"sources" tab.
|
||||||
|
|
||||||
### What is TubeSync written in?
|
### What is TubeSync written in?
|
||||||
|
|
||||||
Python3 using Django, embedding youtube-dl. It's pretty much glue between other much
|
Python3 using Django, embedding yt-dlp. It's pretty much glue between other much
|
||||||
larger libraries.
|
larger libraries.
|
||||||
|
|
||||||
Notable libraries and software used:
|
Notable libraries and software used:
|
||||||
|
|
||||||
* [Django](https://www.djangoproject.com/)
|
* [Django](https://www.djangoproject.com/)
|
||||||
* [youtube-dl](https://yt-dl.org/)
|
* [yt-dlp](https://github.com/yt-dlp/yt-dlp)
|
||||||
* [ffmpeg](https://ffmpeg.org/)
|
* [ffmpeg](https://ffmpeg.org/)
|
||||||
* [Django Background Tasks](https://github.com/arteria/django-background-tasks/)
|
* [Django Background Tasks](https://github.com/arteria/django-background-tasks/)
|
||||||
* [django-sass](https://github.com/coderedcorp/django-sass/)
|
* [django-sass](https://github.com/coderedcorp/django-sass/)
|
||||||
|
@ -277,7 +325,7 @@ See the [Pipefile](https://github.com/meeb/tubesync/blob/main/Pipfile) for a ful
|
||||||
|
|
||||||
### Can I get access to the full Django admin?
|
### Can I get access to the full Django admin?
|
||||||
|
|
||||||
Yes, although pretty much all operations are available through the front end interface
|
Yes, although pretty much all operations are available through the front-end interface
|
||||||
and you can probably break things by playing in the admin. If you still want to access
|
and you can probably break things by playing in the admin. If you still want to access
|
||||||
it you can run:
|
it you can run:
|
||||||
|
|
||||||
|
@ -290,7 +338,9 @@ can log in at http://localhost:4848/admin
|
||||||
|
|
||||||
### Are there user accounts or multi-user support?
|
### Are there user accounts or multi-user support?
|
||||||
|
|
||||||
No not at the moment. This could be added later if there is demand for it.
|
There is support for basic HTTP authentication by setting the `HTTP_USER` and
|
||||||
|
`HTTP_PASS` environment variables. There is not support for multi-user or user
|
||||||
|
management.
|
||||||
|
|
||||||
### Does TubeSync support HTTPS?
|
### Does TubeSync support HTTPS?
|
||||||
|
|
||||||
|
@ -301,6 +351,10 @@ etc.). Configuration of this is beyond the scope of this README.
|
||||||
|
|
||||||
Just `amd64` for the moment. Others may be made available if there is demand.
|
Just `amd64` for the moment. Others may be made available if there is demand.
|
||||||
|
|
||||||
|
### The pipenv install fails with "Locking failed"!
|
||||||
|
|
||||||
|
Make sure that you have `mysql_config` or `mariadb_config` available, as required by the python module `mysqlclient`. On Debian-based systems this is usually found in the package `libmysqlclient-dev`
|
||||||
|
|
||||||
|
|
||||||
# Advanced configuration
|
# Advanced configuration
|
||||||
|
|
||||||
|
@ -309,21 +363,25 @@ There are a number of other environment variables you can set. These are, mostly
|
||||||
useful if you are manually installing TubeSync in some other environment. These are:
|
useful if you are manually installing TubeSync in some other environment. These are:
|
||||||
|
|
||||||
| Name | What | Example |
|
| Name | What | Example |
|
||||||
| ------------------------ | ------------------------------------------------------------ | ---------------------------------- |
|
| --------------------------- | ------------------------------------------------------------ | ------------------------------------ |
|
||||||
| DJANGO_SECRET_KEY | Django's SECRET_KEY | YJySXnQLB7UVZw2dXKDWxI5lEZaImK6l |
|
| DJANGO_SECRET_KEY | Django's SECRET_KEY | YJySXnQLB7UVZw2dXKDWxI5lEZaImK6l |
|
||||||
| DJANGO_FORCE_SCRIPT_NAME | Django's FORCE_SCRIPT_NAME | /somepath |
|
| DJANGO_URL_PREFIX | Run TubeSync in a sub-URL on the web server | /somepath/ |
|
||||||
| TUBESYNC_DEBUG | Enable debugging | True |
|
| TUBESYNC_DEBUG | Enable debugging | True |
|
||||||
| TUBESYNC_WORKERS | Number of background workers, default is 2, max allowed is 8 | 2 |
|
| TUBESYNC_WORKERS | Number of background workers, default is 2, max allowed is 8 | 2 |
|
||||||
| TUBESYNC_HOSTS | Django's ALLOWED_HOSTS | tubesync.example.com,otherhost.com |
|
| TUBESYNC_HOSTS | Django's ALLOWED_HOSTS, defaults to `*` | tubesync.example.com,otherhost.com |
|
||||||
|
| TUBESYNC_RESET_DOWNLOAD_DIR | Toggle resetting `/downloads` permissions, defaults to True | True
|
||||||
| GUNICORN_WORKERS | Number of gunicorn workers to spawn | 3 |
|
| GUNICORN_WORKERS | Number of gunicorn workers to spawn | 3 |
|
||||||
| LISTEN_HOST | IP address for gunicorn to listen on | 127.0.0.1 |
|
| LISTEN_HOST | IP address for gunicorn to listen on | 127.0.0.1 |
|
||||||
| LISTEN_PORT | Port number for gunicorn to listen on | 8080 |
|
| LISTEN_PORT | Port number for gunicorn to listen on | 8080 |
|
||||||
|
| HTTP_USER | Sets the username for HTTP basic authentication | some-username |
|
||||||
|
| HTTP_PASS | Sets the password for HTTP basic authentication | some-secure-password |
|
||||||
|
| DATABASE_CONNECTION | Optional external database connection details | mysql://user:pass@host:port/database |
|
||||||
|
|
||||||
|
|
||||||
# Manual, non-containerised, installation
|
# Manual, non-containerised, installation
|
||||||
|
|
||||||
As a relatively normal Django app you can run TubeSync without the container. Beyond
|
As a relatively normal Django app you can run TubeSync without the container. Beyond
|
||||||
following this rough guide you are on your own and should be knowledgeable about
|
following this rough guide, you are on your own and should be knowledgeable about
|
||||||
installing and running WSGI-based Python web applications before attempting this.
|
installing and running WSGI-based Python web applications before attempting this.
|
||||||
|
|
||||||
1. Clone or download this repo
|
1. Clone or download this repo
|
||||||
|
@ -334,7 +392,7 @@ installing and running WSGI-based Python web applications before attempting this
|
||||||
`tubesync/tubesync/local_settings.py` and edit it as appropriate
|
`tubesync/tubesync/local_settings.py` and edit it as appropriate
|
||||||
5. Run migrations with `./manage.py migrate`
|
5. Run migrations with `./manage.py migrate`
|
||||||
6. Collect static files with `./manage.py collectstatic`
|
6. Collect static files with `./manage.py collectstatic`
|
||||||
6. Set up your prefered WSGI server, such as `gunicorn` poiting it to the application
|
6. Set up your prefered WSGI server, such as `gunicorn` pointing it to the application
|
||||||
in `tubesync/tubesync/wsgi.py`
|
in `tubesync/tubesync/wsgi.py`
|
||||||
7. Set up your proxy server such as `nginx` and forward it to the WSGI server
|
7. Set up your proxy server such as `nginx` and forward it to the WSGI server
|
||||||
8. Check the web interface is working
|
8. Check the web interface is working
|
||||||
|
@ -346,7 +404,7 @@ installing and running WSGI-based Python web applications before attempting this
|
||||||
|
|
||||||
# Tests
|
# Tests
|
||||||
|
|
||||||
There is a moderately comprehensive test suite focussing on the custom media format
|
There is a moderately comprehensive test suite focusing on the custom media format
|
||||||
matching logic and that the front-end interface works. You can run it via Django:
|
matching logic and that the front-end interface works. You can run it via Django:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
|
|
@ -1,27 +0,0 @@
|
||||||
#!/usr/bin/with-contenv bash
|
|
||||||
|
|
||||||
# Change runtime user UID and GID
|
|
||||||
PUID=${PUID:-911}
|
|
||||||
PGID=${PGID:-911}
|
|
||||||
groupmod -o -g "$PGID" app
|
|
||||||
usermod -o -u "$PUID" app
|
|
||||||
|
|
||||||
# Reset permissions
|
|
||||||
chown -R app:app /run/app && \
|
|
||||||
chmod -R 0700 /run/app && \
|
|
||||||
chown -R app:app /config && \
|
|
||||||
chmod -R 0755 /config && \
|
|
||||||
chown -R app:app /downloads && \
|
|
||||||
chmod -R 0755 /downloads && \
|
|
||||||
chown -R root:app /app && \
|
|
||||||
chmod -R 0750 /app && \
|
|
||||||
chown -R app:app /app/common/static && \
|
|
||||||
chmod -R 0750 /app/common/static && \
|
|
||||||
chown -R app:app /app/static && \
|
|
||||||
chmod -R 0750 /app/static && \
|
|
||||||
find /app -type f -exec chmod 640 {} \; && \
|
|
||||||
chmod +x /app/healthcheck.py
|
|
||||||
|
|
||||||
# Run migrations
|
|
||||||
exec s6-setuidgid app \
|
|
||||||
/usr/bin/python3 /app/manage.py migrate
|
|
|
@ -79,6 +79,11 @@ http {
|
||||||
proxy_connect_timeout 10;
|
proxy_connect_timeout 10;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# File dwnload and streaming
|
||||||
|
location /media-data/ {
|
||||||
|
internal;
|
||||||
|
alias /downloads/;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,46 @@
|
||||||
|
bind 127.0.0.1
|
||||||
|
protected-mode yes
|
||||||
|
port 6379
|
||||||
|
tcp-backlog 511
|
||||||
|
timeout 0
|
||||||
|
tcp-keepalive 300
|
||||||
|
daemonize no
|
||||||
|
supervised no
|
||||||
|
loglevel notice
|
||||||
|
logfile ""
|
||||||
|
databases 1
|
||||||
|
always-show-logo no
|
||||||
|
save ""
|
||||||
|
dir /var/lib/redis
|
||||||
|
maxmemory 64mb
|
||||||
|
maxmemory-policy noeviction
|
||||||
|
lazyfree-lazy-eviction no
|
||||||
|
lazyfree-lazy-expire no
|
||||||
|
lazyfree-lazy-server-del no
|
||||||
|
replica-lazy-flush no
|
||||||
|
lazyfree-lazy-user-del no
|
||||||
|
oom-score-adj no
|
||||||
|
oom-score-adj-values 0 200 800
|
||||||
|
appendonly no
|
||||||
|
appendfsync no
|
||||||
|
lua-time-limit 5000
|
||||||
|
slowlog-log-slower-than 10000
|
||||||
|
slowlog-max-len 128
|
||||||
|
latency-monitor-threshold 0
|
||||||
|
notify-keyspace-events ""
|
||||||
|
hash-max-ziplist-entries 512
|
||||||
|
hash-max-ziplist-value 64
|
||||||
|
list-max-ziplist-size -2
|
||||||
|
list-compress-depth 0
|
||||||
|
set-max-intset-entries 512
|
||||||
|
zset-max-ziplist-entries 128
|
||||||
|
zset-max-ziplist-value 64
|
||||||
|
hll-sparse-max-bytes 3000
|
||||||
|
stream-node-max-bytes 4096
|
||||||
|
stream-node-max-entries 100
|
||||||
|
activerehashing yes
|
||||||
|
client-output-buffer-limit normal 0 0 0
|
||||||
|
client-output-buffer-limit replica 256mb 64mb 60
|
||||||
|
client-output-buffer-limit pubsub 32mb 8mb 60
|
||||||
|
hz 10
|
||||||
|
dynamic-hz yes
|
|
@ -0,0 +1 @@
|
||||||
|
gunicorn
|
|
@ -0,0 +1,25 @@
|
||||||
|
#!/usr/bin/with-contenv bash
|
||||||
|
|
||||||
|
UMASK_SET=${UMASK_SET:-022}
|
||||||
|
umask "$UMASK_SET"
|
||||||
|
|
||||||
|
cd /app || exit
|
||||||
|
|
||||||
|
PIDFILE=/run/app/celery-beat.pid
|
||||||
|
SCHEDULE=/tmp/tubesync-celerybeat-schedule
|
||||||
|
|
||||||
|
if [ -f "${PIDFILE}" ]
|
||||||
|
then
|
||||||
|
PID=$(cat $PIDFILE)
|
||||||
|
echo "Unexpected PID file exists at ${PIDFILE} with PID: ${PID}"
|
||||||
|
if kill -0 $PID
|
||||||
|
then
|
||||||
|
echo "Killing old gunicorn process with PID: ${PID}"
|
||||||
|
kill -9 $PID
|
||||||
|
fi
|
||||||
|
echo "Removing stale PID file: ${PIDFILE}"
|
||||||
|
rm ${PIDFILE}
|
||||||
|
fi
|
||||||
|
|
||||||
|
#exec s6-setuidgid app \
|
||||||
|
# /usr/local/bin/celery --workdir /app -A tubesync beat --pidfile ${PIDFILE} -s ${SCHEDULE}
|
|
@ -0,0 +1 @@
|
||||||
|
longrun
|
|
@ -0,0 +1 @@
|
||||||
|
gunicorn
|
|
@ -0,0 +1,24 @@
|
||||||
|
#!/usr/bin/with-contenv bash
|
||||||
|
|
||||||
|
UMASK_SET=${UMASK_SET:-022}
|
||||||
|
umask "$UMASK_SET"
|
||||||
|
|
||||||
|
cd /app || exit
|
||||||
|
|
||||||
|
PIDFILE=/run/app/celery-worker.pid
|
||||||
|
|
||||||
|
if [ -f "${PIDFILE}" ]
|
||||||
|
then
|
||||||
|
PID=$(cat $PIDFILE)
|
||||||
|
echo "Unexpected PID file exists at ${PIDFILE} with PID: ${PID}"
|
||||||
|
if kill -0 $PID
|
||||||
|
then
|
||||||
|
echo "Killing old gunicorn process with PID: ${PID}"
|
||||||
|
kill -9 $PID
|
||||||
|
fi
|
||||||
|
echo "Removing stale PID file: ${PIDFILE}"
|
||||||
|
rm ${PIDFILE}
|
||||||
|
fi
|
||||||
|
|
||||||
|
#exec s6-setuidgid app \
|
||||||
|
# /usr/local/bin/celery --workdir /app -A tubesync worker --pidfile ${PIDFILE} -l INFO
|
|
@ -0,0 +1 @@
|
||||||
|
longrun
|
|
@ -0,0 +1 @@
|
||||||
|
tubesync-init
|
|
@ -0,0 +1,24 @@
|
||||||
|
#!/command/with-contenv bash
|
||||||
|
|
||||||
|
UMASK_SET=${UMASK_SET:-022}
|
||||||
|
umask "$UMASK_SET"
|
||||||
|
|
||||||
|
cd /app || exit
|
||||||
|
|
||||||
|
PIDFILE=/run/app/gunicorn.pid
|
||||||
|
|
||||||
|
if [ -f "${PIDFILE}" ]
|
||||||
|
then
|
||||||
|
PID=$(cat $PIDFILE)
|
||||||
|
echo "Unexpected PID file exists at ${PIDFILE} with PID: ${PID}"
|
||||||
|
if kill -0 $PID
|
||||||
|
then
|
||||||
|
echo "Killing old gunicorn process with PID: ${PID}"
|
||||||
|
kill -9 $PID
|
||||||
|
fi
|
||||||
|
echo "Removing stale PID file: ${PIDFILE}"
|
||||||
|
rm ${PIDFILE}
|
||||||
|
fi
|
||||||
|
|
||||||
|
exec s6-setuidgid app \
|
||||||
|
/usr/local/bin/gunicorn -c /app/tubesync/gunicorn.py --capture-output tubesync.wsgi:application
|
|
@ -0,0 +1 @@
|
||||||
|
longrun
|
|
@ -0,0 +1 @@
|
||||||
|
gunicorn
|
|
@ -0,0 +1,5 @@
|
||||||
|
#!/command/with-contenv bash
|
||||||
|
|
||||||
|
cd /
|
||||||
|
|
||||||
|
/usr/sbin/nginx
|
|
@ -0,0 +1 @@
|
||||||
|
longrun
|
|
@ -0,0 +1,4 @@
|
||||||
|
#!/command/with-contenv bash
|
||||||
|
|
||||||
|
exec s6-setuidgid redis \
|
||||||
|
/usr/bin/redis-server /etc/redis/redis.conf
|
|
@ -0,0 +1 @@
|
||||||
|
longrun
|
|
@ -0,0 +1,34 @@
|
||||||
|
#!/command/with-contenv bash
|
||||||
|
|
||||||
|
# Change runtime user UID and GID
|
||||||
|
PUID="${PUID:-911}"
|
||||||
|
PUID="${PUID:-911}"
|
||||||
|
groupmod -o -g "$PGID" app
|
||||||
|
usermod -o -u "$PUID" app
|
||||||
|
|
||||||
|
# Reset permissions
|
||||||
|
chown -R app:app /run/app
|
||||||
|
chmod -R 0700 /run/app
|
||||||
|
chown -R app:app /config
|
||||||
|
chmod -R 0755 /config
|
||||||
|
chown -R root:app /app
|
||||||
|
chmod -R 0750 /app
|
||||||
|
chown -R app:app /app/common/static
|
||||||
|
chmod -R 0750 /app/common/static
|
||||||
|
chown -R app:app /app/static
|
||||||
|
chmod -R 0750 /app/static
|
||||||
|
find /app -type f ! -iname healthcheck.py -exec chmod 640 {} \;
|
||||||
|
chmod 0755 /app/healthcheck.py
|
||||||
|
|
||||||
|
# Optionally reset the download dir permissions
|
||||||
|
TUBESYNC_RESET_DOWNLOAD_DIR="${TUBESYNC_RESET_DOWNLOAD_DIR:-True}"
|
||||||
|
if [ "$TUBESYNC_RESET_DOWNLOAD_DIR" == "True" ]
|
||||||
|
then
|
||||||
|
echo "TUBESYNC_RESET_DOWNLOAD_DIR=True, Resetting /downloads directory permissions"
|
||||||
|
chown -R app:app /downloads
|
||||||
|
chmod -R 0755 /downloads
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Run migrations
|
||||||
|
exec s6-setuidgid app \
|
||||||
|
/usr/bin/python3 /app/manage.py migrate
|
|
@ -0,0 +1 @@
|
||||||
|
oneshot
|
|
@ -0,0 +1,3 @@
|
||||||
|
#!/command/execlineb -P
|
||||||
|
|
||||||
|
/etc/s6-overlay/s6-rc.d/tubesync-init/run
|
|
@ -0,0 +1 @@
|
||||||
|
gunicorn
|
|
@ -1,4 +1,4 @@
|
||||||
#!/usr/bin/with-contenv bash
|
#!/command/with-contenv bash
|
||||||
|
|
||||||
exec s6-setuidgid app \
|
exec s6-setuidgid app \
|
||||||
/usr/bin/python3 /app/manage.py process_tasks
|
/usr/bin/python3 /app/manage.py process_tasks
|
|
@ -0,0 +1 @@
|
||||||
|
longrun
|
|
@ -1,9 +0,0 @@
|
||||||
#!/usr/bin/with-contenv bash
|
|
||||||
|
|
||||||
UMASK_SET=${UMASK_SET:-022}
|
|
||||||
umask "$UMASK_SET"
|
|
||||||
|
|
||||||
cd /app || exit
|
|
||||||
|
|
||||||
exec s6-setuidgid app \
|
|
||||||
/usr/local/bin/gunicorn -c /app/tubesync/gunicorn.py --capture-output tubesync.wsgi:application
|
|
|
@ -1,5 +0,0 @@
|
||||||
#!/usr/bin/with-contenv bash
|
|
||||||
|
|
||||||
cd /
|
|
||||||
|
|
||||||
/usr/sbin/nginx
|
|
|
@ -0,0 +1,37 @@
|
||||||
|
# TubeSync
|
||||||
|
|
||||||
|
## Advanced usage guide - creating missing metadata
|
||||||
|
|
||||||
|
This is a new feature in v0.9 of TubeSync and later. It allows you to create or
|
||||||
|
re-create missing metadata in your TubeSync download directories for missing `nfo`
|
||||||
|
files and thumbnails.
|
||||||
|
|
||||||
|
If you add a source with "write NFO files" or "copy thumbnails" disabled, download
|
||||||
|
some media and then update the source to write NFO files or copy thumbnails then
|
||||||
|
TubeSync will not automatically retroactively attempt to copy or create your missing
|
||||||
|
metadata files. You can use a special one-off command to manually write missing
|
||||||
|
metadata files to the correct locations.
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
You have added a source without metadata writing enabled, downloaded some media, then
|
||||||
|
updated the source to enable metadata writing.
|
||||||
|
|
||||||
|
## Steps
|
||||||
|
|
||||||
|
### 1. Run the batch metadata sync command
|
||||||
|
|
||||||
|
Execute the following Django command:
|
||||||
|
|
||||||
|
`./manage.py sync-missing-metadata`
|
||||||
|
|
||||||
|
When deploying TubeSync inside a container, you can execute this with:
|
||||||
|
|
||||||
|
`docker exec -ti tubesync python3 /app/manage.py sync-missing-metadata`
|
||||||
|
|
||||||
|
This command will log what its doing to the terminal when you run it.
|
||||||
|
|
||||||
|
Internally, this command loops over all your sources which have been saved with
|
||||||
|
"write NFO files" or "copy thumbnails" enabled. Then, loops over all media saved to
|
||||||
|
that source and confirms that the appropriate thumbnail files have been copied over and
|
||||||
|
the NFO file has been written if enabled.
|
|
@ -0,0 +1,81 @@
|
||||||
|
# TubeSync
|
||||||
|
|
||||||
|
## Advanced usage guide - importing existing media
|
||||||
|
|
||||||
|
This is a new feature in v0.9 of TubeSync and later. It allows you to mark existing
|
||||||
|
downloaded media as "downloaded" in TubeSync. You can use this feature if, for example,
|
||||||
|
you already have an extensive catalogue of downloaded media which you want to mark
|
||||||
|
as downloaded into TubeSync so TubeSync doesn't re-download media you already have.
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
Your existing downloaded media MUST contain the unique ID. For YouTube videos, this
|
||||||
|
means the YouTube video ID MUST be in the filename.
|
||||||
|
|
||||||
|
Supported extensions to be imported are .m4a, .ogg, .mkv, .mp3, .mp4 and .avi. Your
|
||||||
|
media you want to import must end in one of these file extensions.
|
||||||
|
|
||||||
|
## Caveats
|
||||||
|
|
||||||
|
As TubeSync does not probe media and your existing media may be re-encoded or in
|
||||||
|
different formats to what is available in the current media metadata there is no way
|
||||||
|
for TubeSync to know what codecs, resolution, bitrate etc. your imported media is in.
|
||||||
|
Any manually imported existing local media will display blank boxes for this
|
||||||
|
information on the TubeSync interface as it's unavailable.
|
||||||
|
|
||||||
|
## Steps
|
||||||
|
|
||||||
|
### 1. Add your source to TubeSync
|
||||||
|
|
||||||
|
Add your source to TubeSync, such as a YouTube channel. **Make sure you untick the
|
||||||
|
"download media" checkbox.**
|
||||||
|
|
||||||
|
This will allow TubeSync to index all the available media on your source, but won't
|
||||||
|
start downloading any media.
|
||||||
|
|
||||||
|
### 2. Wait
|
||||||
|
|
||||||
|
Wait for all the media on your source to be indexed. This may take some time.
|
||||||
|
|
||||||
|
### 3. Move your existing media into TubeSync
|
||||||
|
|
||||||
|
You now need to move your existing media into TubeSync. You need to move the media
|
||||||
|
files into the correct download directories created by TubeSync. For example, if you
|
||||||
|
have downloaded videos for a YouTube channel "TestChannel", you would have added this
|
||||||
|
as a source called TestChannel and in a directory called test-channel in Tubesync. It
|
||||||
|
would have a download directory created on disk at:
|
||||||
|
|
||||||
|
`/path/to/downloads/test-channel`
|
||||||
|
|
||||||
|
You would move all of your pre-existing videos you downloaded outside of TubeSync for
|
||||||
|
this channel into this directory.
|
||||||
|
|
||||||
|
In short, your existing media needs to be moved into the correct TubeSync source
|
||||||
|
directory to be detected.
|
||||||
|
|
||||||
|
This is required so TubeSync can know which Source to link the media to.
|
||||||
|
|
||||||
|
### 4. Run the batch import command
|
||||||
|
|
||||||
|
Execute the following Django command:
|
||||||
|
|
||||||
|
`./manage.py import-existing-media`
|
||||||
|
|
||||||
|
When deploying TubeSync inside a container, you can execute this with:
|
||||||
|
|
||||||
|
`docker exec -ti tubesync python3 /app/manage.py import-existing-media`
|
||||||
|
|
||||||
|
This command will log what it's doing to the terminal when you run it.
|
||||||
|
|
||||||
|
Internally, `import-existing-media` looks for the unique media key (for YouTube, this
|
||||||
|
is the YouTube video ID) in the filename and detects the source to link it to based
|
||||||
|
on the directory the media file is inside.
|
||||||
|
|
||||||
|
|
||||||
|
### 5. Re-enable downloading at the source
|
||||||
|
|
||||||
|
Edit your source and re-enable / tick the "download media" option. This will allow
|
||||||
|
TubeSync to download any missing media you did not manually import.
|
||||||
|
|
||||||
|
Note that TubeSync will still get screenshots, write `nfo` files, etc. for files you
|
||||||
|
manually import if enabled at the source level.
|
|
@ -0,0 +1,132 @@
|
||||||
|
# TubeSync
|
||||||
|
|
||||||
|
## Advanced usage guide - using other database backends
|
||||||
|
|
||||||
|
This is a new feature in v1.0 of TubeSync and later. It allows you to use a custom
|
||||||
|
existing external database server instead of the default SQLite database. You may want
|
||||||
|
to use this if you encounter performance issues with adding very large or a large
|
||||||
|
number of channels and database write contention (as shown by errors in the log)
|
||||||
|
become an issue.
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
TubeSync supports SQLite (the automatic default) as well as PostgreSQL, MySQL and
|
||||||
|
MariaDB. For MariaDB just follow the MySQL instructions as the driver is the same.
|
||||||
|
|
||||||
|
You should start with a blank install of TubeSync. Migrating to a new database will
|
||||||
|
reset your database. If you are comfortable with Django you can export and re-import
|
||||||
|
existing database data with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ docker exec -i tubesync python3 /app/manage.py dumpdata > some-file.json
|
||||||
|
```
|
||||||
|
|
||||||
|
Then change your database backend over, then use
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ cat some-file.json | docker exec -i tubesync python3 /app/manage.py loaddata - --format=json
|
||||||
|
```
|
||||||
|
|
||||||
|
As detailed in the Django documentation:
|
||||||
|
|
||||||
|
https://docs.djangoproject.com/en/3.1/ref/django-admin/#dumpdata
|
||||||
|
|
||||||
|
and:
|
||||||
|
|
||||||
|
https://docs.djangoproject.com/en/3.1/ref/django-admin/#loaddata
|
||||||
|
|
||||||
|
Further instructions are beyond the scope of TubeSync documentation and you should refer
|
||||||
|
to Django documentation for more details.
|
||||||
|
|
||||||
|
If you are not comfortable with the above, then skip the `dumpdata` steps, however
|
||||||
|
remember you will start again with a completely new database.
|
||||||
|
|
||||||
|
## Steps
|
||||||
|
|
||||||
|
### 1. Create a database in your external database server
|
||||||
|
|
||||||
|
You need to create a database and a user with permissions to access the database in
|
||||||
|
your chosen external database server. Steps vary between PostgreSQL, MySQL and MariaDB
|
||||||
|
so this is up to you to work out.
|
||||||
|
|
||||||
|
### 2. Set the database connection string environment variable
|
||||||
|
|
||||||
|
You need to provide the database connection details to TubeSync via an environment
|
||||||
|
variable. The environment variable name is `DATABASE_CONNECTION` and the format is the
|
||||||
|
standard URL-style string. Examples are:
|
||||||
|
|
||||||
|
`postgresql://tubesync:password@localhost:5432/tubesync`
|
||||||
|
|
||||||
|
and
|
||||||
|
|
||||||
|
`mysql://tubesync:password@localhost:3306/tubesync`
|
||||||
|
|
||||||
|
*Important note:* For MySQL databases make SURE you create the tubesync database with
|
||||||
|
`utf8mb4` encoding, like:
|
||||||
|
|
||||||
|
`CREATE DATABASE tubesync CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci;`
|
||||||
|
|
||||||
|
Without `utf8mb4` encoding things like emojis in video titles (or any extended UTF8
|
||||||
|
characters) can cause issues.
|
||||||
|
|
||||||
|
### 3. Start TubeSync and check the logs
|
||||||
|
|
||||||
|
Once you start TubeSync with the new database connection you should see the following log
|
||||||
|
entry in the container or stdout logs:
|
||||||
|
|
||||||
|
`2021-04-04 22:42:17,912 [tubesync/INFO] Using database connection: django.db.backends.postgresql://tubesync:[hidden]@localhost:5432/tubesync`
|
||||||
|
|
||||||
|
If you see a line similar to the above and the web interface loads, congratulations,
|
||||||
|
you are now using an external database server for your TubeSync data!
|
||||||
|
|
||||||
|
## Database Compression (For MariaDB)
|
||||||
|
With a lot of media files the `sync_media` table grows in size quickly.
|
||||||
|
You can save space using column compression using the following steps while using MariaDB:
|
||||||
|
|
||||||
|
1. Stop tubesync
|
||||||
|
2. Execute `ALTER TABLE sync_media MODIFY metadata LONGTEXT COMPRESSED;` on database tubesync
|
||||||
|
3. Start tubesync and confirm the connection still works.
|
||||||
|
|
||||||
|
## Docker Compose
|
||||||
|
|
||||||
|
If you're using Docker Compose and simply want to connect to another container with
|
||||||
|
the DB for the performance benefits, a configuration like this would be enough:
|
||||||
|
|
||||||
|
```
|
||||||
|
tubesync-db:
|
||||||
|
image: postgres:15.2
|
||||||
|
container_name: tubesync-db
|
||||||
|
restart: unless-stopped
|
||||||
|
volumes:
|
||||||
|
- /<path/to>/init.sql:/docker-entrypoint-initdb.d/init.sql
|
||||||
|
- /<path/to>/tubesync-db:/var/lib/postgresql/data
|
||||||
|
environment:
|
||||||
|
- POSTGRES_USER=postgres
|
||||||
|
- POSTGRES_PASSWORD=testpassword
|
||||||
|
|
||||||
|
tubesync:
|
||||||
|
image: ghcr.io/meeb/tubesync:latest
|
||||||
|
container_name: tubesync
|
||||||
|
restart: unless-stopped
|
||||||
|
ports:
|
||||||
|
- 4848:4848
|
||||||
|
volumes:
|
||||||
|
- /<path/to>/tubesync/config:/config
|
||||||
|
- /<path/to>/YouTube:/downloads
|
||||||
|
environment:
|
||||||
|
- DATABASE_CONNECTION=postgresql://postgres:testpassword@tubesync-db:5432/tubesync
|
||||||
|
depends_on:
|
||||||
|
- tubesync-db
|
||||||
|
```
|
||||||
|
|
||||||
|
Note that an `init.sql` file is needed to initialize the `tubesync`
|
||||||
|
database before it can be written to. This file should contain:
|
||||||
|
|
||||||
|
```
|
||||||
|
CREATE DATABASE tubesync;
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
Then it must be mapped to `/docker-entrypoint-initdb.d/init.sql` for it
|
||||||
|
to be executed on first startup of the container. See the `tubesync-db`
|
||||||
|
volume mapping above for how to do this.
|
|
@ -0,0 +1,30 @@
|
||||||
|
# TubeSync
|
||||||
|
|
||||||
|
## Advanced usage guide - reset media metadata from the command line
|
||||||
|
|
||||||
|
This command allows you to reset all media item metadata. You might want to use
|
||||||
|
this if you have a lot of media items with invalid metadata and you want to
|
||||||
|
wipe it which triggers the metadata to be redownloaded.
|
||||||
|
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
You have added some sources and media
|
||||||
|
|
||||||
|
## Steps
|
||||||
|
|
||||||
|
### 1. Run the reset tasks command
|
||||||
|
|
||||||
|
Execute the following Django command:
|
||||||
|
|
||||||
|
`./manage.py reset-metadata`
|
||||||
|
|
||||||
|
When deploying TubeSync inside a container, you can execute this with:
|
||||||
|
|
||||||
|
`docker exec -ti tubesync python3 /app/manage.py reset-metadata`
|
||||||
|
|
||||||
|
This command will log what it's doing to the terminal when you run it.
|
||||||
|
|
||||||
|
When this is run, new tasks will be immediately created so all your media
|
||||||
|
items will start downloading updated metadata straight away, any missing information
|
||||||
|
such as thumbnails will be redownloaded, etc.
|
|
@ -0,0 +1,33 @@
|
||||||
|
# TubeSync
|
||||||
|
|
||||||
|
## Advanced usage guide - reset tasks from the command line
|
||||||
|
|
||||||
|
This is a new feature in v1.0 of TubeSync and later. It allows you to reset all
|
||||||
|
scheduled tasks from the command line as well as the "reset tasks" button in the
|
||||||
|
"tasks" tab of the dashboard.
|
||||||
|
|
||||||
|
This is useful for TubeSync installations where you may have a lot of media and
|
||||||
|
sources added and the "reset tasks" button may take too long to the extent where
|
||||||
|
the page times out (with a 502 error or similar issue).
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
You have added some sources and media
|
||||||
|
|
||||||
|
## Steps
|
||||||
|
|
||||||
|
### 1. Run the reset tasks command
|
||||||
|
|
||||||
|
Execute the following Django command:
|
||||||
|
|
||||||
|
`./manage.py reset-tasks`
|
||||||
|
|
||||||
|
When deploying TubeSync inside a container, you can execute this with:
|
||||||
|
|
||||||
|
`docker exec -ti tubesync python3 /app/manage.py reset-tasks`
|
||||||
|
|
||||||
|
This command will log what it's doing to the terminal when you run it.
|
||||||
|
|
||||||
|
When this is run, new tasks will be immediately created so all your sources will be
|
||||||
|
indexed again straight away, any missing information such as thumbnails will be
|
||||||
|
redownloaded, etc.
|
|
@ -0,0 +1,50 @@
|
||||||
|
# TubeSync
|
||||||
|
|
||||||
|
## Advanced usage guide - using exported cookies
|
||||||
|
|
||||||
|
This is a new feature in v0.10 of TubeSync and later. It allows you to use the cookies
|
||||||
|
file exported from your browser in "Netscape" format with TubeSync to authenticate
|
||||||
|
to YouTube. This can bypass some throttling, age restrictions and other blocks at
|
||||||
|
YouTube.
|
||||||
|
|
||||||
|
**IMPORTANT NOTE**: Using cookies exported from your browser that is authenticated
|
||||||
|
to YouTube identifies your Google account as using TubeSync. This may result in
|
||||||
|
potential account impacts and is entirely at your own risk. Do not use this
|
||||||
|
feature unless you really know what you're doing.
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
Have a browser that supports exporting your cookies and be logged into YouTube.
|
||||||
|
|
||||||
|
## Steps
|
||||||
|
|
||||||
|
### 1. Export your cookies
|
||||||
|
|
||||||
|
You need to export cookies for youtube.com from your browser, you can either do
|
||||||
|
this manually or there are plug-ins to automate this for you. This file must be
|
||||||
|
in the "Netscape" cookie export format.
|
||||||
|
|
||||||
|
Save your cookies as a `cookies.txt` file.
|
||||||
|
|
||||||
|
### 2. Import into TubeSync
|
||||||
|
|
||||||
|
Drop the `cookies.txt` file into your TubeSync `config` directory.
|
||||||
|
|
||||||
|
If detected correctly, you will see something like this in the worker or container
|
||||||
|
logs:
|
||||||
|
|
||||||
|
```
|
||||||
|
YYYY-MM-DD HH:MM:SS,mmm [tubesync/INFO] [youtube-dl] using cookies.txt from: /config/cookies.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
If you see that line it's working correctly.
|
||||||
|
|
||||||
|
If you see errors in your logs like this:
|
||||||
|
|
||||||
|
```
|
||||||
|
http.cookiejar.LoadError: '/config/cookies.txt' does not look like a Netscape format cookies file
|
||||||
|
```
|
||||||
|
|
||||||
|
Then your `cookies.txt` file was not generated or created correctly as it's not
|
||||||
|
in the required "Netscape" format. You can fix this by exporting your `cookies.txt`
|
||||||
|
in the correct "Netscape" format.
|
|
@ -0,0 +1,2 @@
|
||||||
|
[global]
|
||||||
|
extra-index-url=https://www.piwheels.org/simple
|
|
@ -1,10 +1,10 @@
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from .third_party_versions import youtube_dl_version, ffmpeg_version
|
from .third_party_versions import yt_dlp_version, ffmpeg_version
|
||||||
|
|
||||||
|
|
||||||
def app_details(request):
|
def app_details(request):
|
||||||
return {
|
return {
|
||||||
'app_version': str(settings.VERSION),
|
'app_version': str(settings.VERSION),
|
||||||
'youtube_dl_version': youtube_dl_version,
|
'yt_dlp_version': yt_dlp_version,
|
||||||
'ffmpeg_version': ffmpeg_version,
|
'ffmpeg_version': ffmpeg_version,
|
||||||
}
|
}
|
||||||
|
|
|
@ -20,3 +20,10 @@ class DownloadFailedException(Exception):
|
||||||
exist.
|
exist.
|
||||||
'''
|
'''
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class DatabaseConnectionError(Exception):
|
||||||
|
'''
|
||||||
|
Raised when parsing or initially connecting to a database.
|
||||||
|
'''
|
||||||
|
pass
|
||||||
|
|
|
@ -1,10 +1,14 @@
|
||||||
import logging
|
import logging
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
|
||||||
|
logging_level = logging.DEBUG if settings.DEBUG else logging.INFO
|
||||||
|
|
||||||
|
|
||||||
log = logging.getLogger('tubesync')
|
log = logging.getLogger('tubesync')
|
||||||
log.setLevel(logging.DEBUG)
|
log.setLevel(logging_level)
|
||||||
ch = logging.StreamHandler()
|
ch = logging.StreamHandler()
|
||||||
ch.setLevel(logging.DEBUG)
|
ch.setLevel(logging_level)
|
||||||
formatter = logging.Formatter('%(asctime)s [%(name)s/%(levelname)s] %(message)s')
|
formatter = logging.Formatter('%(asctime)s [%(name)s/%(levelname)s] %(message)s')
|
||||||
ch.setFormatter(formatter)
|
ch.setFormatter(formatter)
|
||||||
log.addHandler(ch)
|
log.addHandler(ch)
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
|
from django.conf import settings
|
||||||
from django.forms import BaseForm
|
from django.forms import BaseForm
|
||||||
|
from basicauth.middleware import BasicAuthMiddleware as BaseBasicAuthMiddleware
|
||||||
|
|
||||||
|
|
||||||
class MaterializeDefaultFieldsMiddleware:
|
class MaterializeDefaultFieldsMiddleware:
|
||||||
|
@ -19,3 +21,12 @@ class MaterializeDefaultFieldsMiddleware:
|
||||||
for _, field in v.fields.items():
|
for _, field in v.fields.items():
|
||||||
field.widget.attrs.update({'class':'browser-default'})
|
field.widget.attrs.update({'class':'browser-default'})
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
|
||||||
|
class BasicAuthMiddleware(BaseBasicAuthMiddleware):
|
||||||
|
|
||||||
|
def process_request(self, request):
|
||||||
|
bypass_uris = getattr(settings, 'BASICAUTH_ALWAYS_ALLOW_URIS', [])
|
||||||
|
if request.path in bypass_uris:
|
||||||
|
return None
|
||||||
|
return super().process_request(request)
|
||||||
|
|
|
@ -1,20 +1,20 @@
|
||||||
@font-face {
|
@font-face {
|
||||||
font-family: 'roboto-light';
|
font-family: 'roboto';
|
||||||
src: url('/static/fonts/roboto/roboto-light.woff') format('woff');
|
src: url('../fonts/roboto/roboto-light.woff') format('woff');
|
||||||
|
font-weight: lighter;
|
||||||
|
font-style: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
@font-face {
|
||||||
|
font-family: 'roboto';
|
||||||
|
src: url('../fonts/roboto/roboto-regular.woff') format('woff');
|
||||||
font-weight: normal;
|
font-weight: normal;
|
||||||
font-style: normal;
|
font-style: normal;
|
||||||
}
|
}
|
||||||
|
|
||||||
@font-face {
|
@font-face {
|
||||||
font-family: 'roboto-regular';
|
font-family: 'roboto';
|
||||||
src: url('/static/fonts/roboto/roboto-regular.woff') format('woff');
|
src: url('../fonts/roboto/roboto-bold.woff') format('woff');
|
||||||
font-weight: normal;
|
|
||||||
font-style: normal;
|
|
||||||
}
|
|
||||||
|
|
||||||
@font-face {
|
|
||||||
font-family: 'roboto-bold';
|
|
||||||
src: url('/static/fonts/roboto/roboto-bold.woff') format('woff');
|
|
||||||
font-weight: bold;
|
font-weight: bold;
|
||||||
font-style: normal;
|
font-style: normal;
|
||||||
}
|
}
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
}
|
}
|
||||||
.help-text {
|
.help-text {
|
||||||
color: $form-help-text-colour;
|
color: $form-help-text-colour;
|
||||||
padding: 1rem 0 1rem 0;
|
padding-bottom: 1rem;
|
||||||
}
|
}
|
||||||
label {
|
label {
|
||||||
text-transform: uppercase;
|
text-transform: uppercase;
|
||||||
|
|
|
@ -5,6 +5,13 @@ html {
|
||||||
color: $text-colour;
|
color: $text-colour;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
display: flex;
|
||||||
|
min-height: 100vh;
|
||||||
|
flex-direction: column;
|
||||||
|
justify-content: space-between;
|
||||||
|
}
|
||||||
|
|
||||||
header {
|
header {
|
||||||
|
|
||||||
background-color: $header-background-colour;
|
background-color: $header-background-colour;
|
||||||
|
@ -174,8 +181,10 @@ main {
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
font-weight: bold;
|
font-weight: bold;
|
||||||
text-decoration: none;
|
text-decoration: none;
|
||||||
padding: 5px 10px 5px 10px;
|
padding: 5px 8px 4px 8px;
|
||||||
margin: 0 3px 0 3px;
|
margin: 0 3px 6px 3px;
|
||||||
|
min-width: 40px;
|
||||||
|
min-height: 40px;
|
||||||
background-color: $pagination-background-colour;
|
background-color: $pagination-background-colour;
|
||||||
color: $pagination-text-colour;
|
color: $pagination-text-colour;
|
||||||
border: 2px $pagination-border-colour solid;
|
border: 2px $pagination-border-colour solid;
|
||||||
|
|
|
@ -1,2 +1,2 @@
|
||||||
$font-family: 'roboto-regular', Arial, Helvetica, sans-serif;
|
$font-family: 'roboto', Arial, Helvetica, sans-serif;
|
||||||
$font-size: 1.05rem;
|
$font-size: 1.05rem;
|
||||||
|
|
|
@ -65,6 +65,7 @@ readers do not read off random characters that represent icons */
|
||||||
.#{$fa-css-prefix}-arrows-alt-h:before { content: fa-content($fa-var-arrows-alt-h); }
|
.#{$fa-css-prefix}-arrows-alt-h:before { content: fa-content($fa-var-arrows-alt-h); }
|
||||||
.#{$fa-css-prefix}-arrows-alt-v:before { content: fa-content($fa-var-arrows-alt-v); }
|
.#{$fa-css-prefix}-arrows-alt-v:before { content: fa-content($fa-var-arrows-alt-v); }
|
||||||
.#{$fa-css-prefix}-artstation:before { content: fa-content($fa-var-artstation); }
|
.#{$fa-css-prefix}-artstation:before { content: fa-content($fa-var-artstation); }
|
||||||
|
.#{$fa-css-prefix}-arrow-rotate-right:before { content: fa-content($fa-var-arrow-rotate-right); }
|
||||||
.#{$fa-css-prefix}-assistive-listening-systems:before { content: fa-content($fa-var-assistive-listening-systems); }
|
.#{$fa-css-prefix}-assistive-listening-systems:before { content: fa-content($fa-var-assistive-listening-systems); }
|
||||||
.#{$fa-css-prefix}-asterisk:before { content: fa-content($fa-var-asterisk); }
|
.#{$fa-css-prefix}-asterisk:before { content: fa-content($fa-var-asterisk); }
|
||||||
.#{$fa-css-prefix}-asymmetrik:before { content: fa-content($fa-var-asymmetrik); }
|
.#{$fa-css-prefix}-asymmetrik:before { content: fa-content($fa-var-asymmetrik); }
|
||||||
|
|
|
@ -80,6 +80,7 @@ $fa-var-arrow-right: \f061;
|
||||||
$fa-var-arrow-up: \f062;
|
$fa-var-arrow-up: \f062;
|
||||||
$fa-var-arrows-alt: \f0b2;
|
$fa-var-arrows-alt: \f0b2;
|
||||||
$fa-var-arrows-alt-h: \f337;
|
$fa-var-arrows-alt-h: \f337;
|
||||||
|
$fa-var-arrow-rotate-right: \f01e;
|
||||||
$fa-var-arrows-alt-v: \f338;
|
$fa-var-arrows-alt-v: \f338;
|
||||||
$fa-var-artstation: \f77a;
|
$fa-var-artstation: \f77a;
|
||||||
$fa-var-assistive-listening-systems: \f2a2;
|
$fa-var-assistive-listening-systems: \f2a2;
|
||||||
|
|
|
@ -14,7 +14,7 @@
|
||||||
// Text Label Style
|
// Text Label Style
|
||||||
+ span:not(.lever) {
|
+ span:not(.lever) {
|
||||||
position: relative;
|
position: relative;
|
||||||
padding-left: 35px;
|
padding-left: 27px;
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
height: 25px;
|
height: 25px;
|
||||||
|
|
|
@ -17,3 +17,16 @@ html {
|
||||||
visibility: visible;
|
visibility: visible;
|
||||||
opacity: 1;
|
opacity: 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.flex-collection-container {
|
||||||
|
display: flex !important;
|
||||||
|
align-items: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.flex-grow {
|
||||||
|
flex-grow: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.help-text > i {
|
||||||
|
padding-right: 6px;
|
||||||
|
}
|
|
@ -16,6 +16,8 @@
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
|
|
||||||
|
<div class="app">
|
||||||
|
|
||||||
<header>
|
<header>
|
||||||
<div class="container">
|
<div class="container">
|
||||||
<a href="{% url 'sync:dashboard' %}">
|
<a href="{% url 'sync:dashboard' %}">
|
||||||
|
@ -43,6 +45,8 @@
|
||||||
</div>
|
</div>
|
||||||
</main>
|
</main>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
<footer>
|
<footer>
|
||||||
<div class="container">
|
<div class="container">
|
||||||
<p>
|
<p>
|
||||||
|
@ -53,7 +57,7 @@
|
||||||
</p>
|
</p>
|
||||||
<p>
|
<p>
|
||||||
<a href="https://github.com/meeb/tubesync" class="nowrap" target="_blank"><i class="fab fa-github"></i> TubeSync</a> version <strong>{{ app_version }}</strong> with
|
<a href="https://github.com/meeb/tubesync" class="nowrap" target="_blank"><i class="fab fa-github"></i> TubeSync</a> version <strong>{{ app_version }}</strong> with
|
||||||
<a href="https://yt-dl.org/" class="nowrap" target="_blank"><i class="fas fa-link"></i> youtube-dl</a> version <strong>{{ youtube_dl_version }}</strong> and
|
<a href="https://github.com/yt-dlp/yt-dlp" class="nowrap" target="_blank"><i class="fas fa-link"></i> yt-dlp</a> version <strong>{{ yt_dlp_version }}</strong> and
|
||||||
<a href="https://ffmpeg.org/" class="nowrap" target="_blank"><i class="fas fa-link"></i> FFmpeg</a> version <strong>{{ ffmpeg_version }}</strong>.
|
<a href="https://ffmpeg.org/" class="nowrap" target="_blank"><i class="fas fa-link"></i> FFmpeg</a> version <strong>{{ ffmpeg_version }}</strong>.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
<div class="col s12">
|
<div class="col s12">
|
||||||
<div class="pagination">
|
<div class="pagination">
|
||||||
{% for i in paginator.page_range %}
|
{% for i in paginator.page_range %}
|
||||||
<a class="pagenum{% if i == page_obj.number %} currentpage{% endif %}" href="?{% if filter %}filter={{ filter }}&{% endif %}page={{ i }}">{{ i }}</a>
|
<a class="pagenum{% if i == page_obj.number %} currentpage{% endif %}" href="?{% if filter %}filter={{ filter }}&{% endif %}page={{ i }}{% if show_skipped %}&show_skipped=yes{% endif %}{% if only_skipped %}&only_skipped=yes{% endif %}">{{ i }}</a>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
|
@ -2,6 +2,8 @@ import os.path
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.test import TestCase, Client
|
from django.test import TestCase, Client
|
||||||
from .testutils import prevent_request_warnings
|
from .testutils import prevent_request_warnings
|
||||||
|
from .utils import parse_database_connection_string, clean_filename
|
||||||
|
from .errors import DatabaseConnectionError
|
||||||
|
|
||||||
|
|
||||||
class ErrorPageTestCase(TestCase):
|
class ErrorPageTestCase(TestCase):
|
||||||
|
@ -61,3 +63,75 @@ class CommonStaticTestCase(TestCase):
|
||||||
favicon_real_path = os.path.join(os.sep.join(root_parts),
|
favicon_real_path = os.path.join(os.sep.join(root_parts),
|
||||||
os.sep.join(url_parts))
|
os.sep.join(url_parts))
|
||||||
self.assertTrue(os.path.exists(favicon_real_path))
|
self.assertTrue(os.path.exists(favicon_real_path))
|
||||||
|
|
||||||
|
|
||||||
|
class UtilsTestCase(TestCase):
|
||||||
|
|
||||||
|
def test_parse_database_connection_string(self):
|
||||||
|
database_dict = parse_database_connection_string(
|
||||||
|
'postgresql://tubesync:password@localhost:5432/tubesync')
|
||||||
|
self.assertEqual(database_dict,
|
||||||
|
{
|
||||||
|
'DRIVER': 'postgresql',
|
||||||
|
'ENGINE': 'django.db.backends.postgresql',
|
||||||
|
'USER': 'tubesync',
|
||||||
|
'PASSWORD': 'password',
|
||||||
|
'HOST': 'localhost',
|
||||||
|
'PORT': 5432,
|
||||||
|
'NAME': 'tubesync',
|
||||||
|
'CONN_MAX_AGE': 300,
|
||||||
|
'OPTIONS': {},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
database_dict = parse_database_connection_string(
|
||||||
|
'mysql://tubesync:password@localhost:3306/tubesync')
|
||||||
|
self.assertEqual(database_dict,
|
||||||
|
{
|
||||||
|
'DRIVER': 'mysql',
|
||||||
|
'ENGINE': 'django.db.backends.mysql',
|
||||||
|
'USER': 'tubesync',
|
||||||
|
'PASSWORD': 'password',
|
||||||
|
'HOST': 'localhost',
|
||||||
|
'PORT': 3306,
|
||||||
|
'NAME': 'tubesync',
|
||||||
|
'CONN_MAX_AGE': 300,
|
||||||
|
'OPTIONS': {'charset': 'utf8mb4'}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
# Invalid driver
|
||||||
|
with self.assertRaises(DatabaseConnectionError):
|
||||||
|
parse_database_connection_string(
|
||||||
|
'test://tubesync:password@localhost:5432/tubesync')
|
||||||
|
# No username
|
||||||
|
with self.assertRaises(DatabaseConnectionError):
|
||||||
|
parse_database_connection_string(
|
||||||
|
'postgresql://password@localhost:5432/tubesync')
|
||||||
|
# No database name
|
||||||
|
with self.assertRaises(DatabaseConnectionError):
|
||||||
|
parse_database_connection_string(
|
||||||
|
'postgresql://tubesync:password@5432')
|
||||||
|
# Invalid port
|
||||||
|
with self.assertRaises(DatabaseConnectionError):
|
||||||
|
parse_database_connection_string(
|
||||||
|
'postgresql://tubesync:password@localhost:test/tubesync')
|
||||||
|
# Invalid port
|
||||||
|
with self.assertRaises(DatabaseConnectionError):
|
||||||
|
parse_database_connection_string(
|
||||||
|
'postgresql://tubesync:password@localhost:65537/tubesync')
|
||||||
|
# Invalid username or password
|
||||||
|
with self.assertRaises(DatabaseConnectionError):
|
||||||
|
parse_database_connection_string(
|
||||||
|
'postgresql://tubesync:password:test@localhost:5432/tubesync')
|
||||||
|
# Invalid database name
|
||||||
|
with self.assertRaises(DatabaseConnectionError):
|
||||||
|
parse_database_connection_string(
|
||||||
|
'postgresql://tubesync:password@localhost:5432/tubesync/test')
|
||||||
|
|
||||||
|
def test_clean_filename(self):
|
||||||
|
self.assertEqual(clean_filename('a'), 'a')
|
||||||
|
self.assertEqual(clean_filename('a\t'), 'a')
|
||||||
|
self.assertEqual(clean_filename('a\n'), 'a')
|
||||||
|
self.assertEqual(clean_filename('a a'), 'a a')
|
||||||
|
self.assertEqual(clean_filename('a a'), 'a a')
|
||||||
|
self.assertEqual(clean_filename('a\t\t\ta'), 'a a')
|
||||||
|
self.assertEqual(clean_filename('a\t\t\ta\t\t\t'), 'a a')
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
from youtube_dl import version as yt_version
|
from yt_dlp import version as yt_dlp_version
|
||||||
|
|
||||||
|
|
||||||
youtube_dl_version = str(yt_version.__version__)
|
yt_dlp_version = str(yt_dlp_version.__version__)
|
||||||
ffmpeg_version = '(shared install)'
|
ffmpeg_version = '(shared install)'
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,96 @@
|
||||||
from urllib.parse import urlunsplit, urlencode
|
import string
|
||||||
|
from datetime import datetime
|
||||||
|
from urllib.parse import urlunsplit, urlencode, urlparse
|
||||||
|
from yt_dlp.utils import LazyList
|
||||||
|
from .errors import DatabaseConnectionError
|
||||||
|
|
||||||
|
|
||||||
|
def parse_database_connection_string(database_connection_string):
|
||||||
|
'''
|
||||||
|
Parses a connection string in a URL style format, such as:
|
||||||
|
postgresql://tubesync:password@localhost:5432/tubesync
|
||||||
|
mysql://someuser:somepassword@localhost:3306/tubesync
|
||||||
|
into a Django-compatible settings.DATABASES dict format.
|
||||||
|
'''
|
||||||
|
valid_drivers = ('postgresql', 'mysql')
|
||||||
|
default_ports = {
|
||||||
|
'postgresql': 5432,
|
||||||
|
'mysql': 3306,
|
||||||
|
}
|
||||||
|
django_backends = {
|
||||||
|
'postgresql': 'django.db.backends.postgresql',
|
||||||
|
'mysql': 'django.db.backends.mysql',
|
||||||
|
}
|
||||||
|
backend_options = {
|
||||||
|
'postgresql': {},
|
||||||
|
'mysql': {
|
||||||
|
'charset': 'utf8mb4',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
try:
|
||||||
|
parts = urlparse(str(database_connection_string))
|
||||||
|
except Exception as e:
|
||||||
|
raise DatabaseConnectionError(f'Failed to parse "{database_connection_string}" '
|
||||||
|
f'as a database connection string: {e}') from e
|
||||||
|
driver = parts.scheme
|
||||||
|
user_pass_host_port = parts.netloc
|
||||||
|
database = parts.path
|
||||||
|
if driver not in valid_drivers:
|
||||||
|
raise DatabaseConnectionError(f'Database connection string '
|
||||||
|
f'"{database_connection_string}" specified an '
|
||||||
|
f'invalid driver, must be one of {valid_drivers}')
|
||||||
|
django_driver = django_backends.get(driver)
|
||||||
|
host_parts = user_pass_host_port.split('@')
|
||||||
|
if len(host_parts) != 2:
|
||||||
|
raise DatabaseConnectionError(f'Database connection string netloc must be in '
|
||||||
|
f'the format of user:pass@host')
|
||||||
|
user_pass, host_port = host_parts
|
||||||
|
user_pass_parts = user_pass.split(':')
|
||||||
|
if len(user_pass_parts) != 2:
|
||||||
|
raise DatabaseConnectionError(f'Database connection string netloc must be in '
|
||||||
|
f'the format of user:pass@host')
|
||||||
|
username, password = user_pass_parts
|
||||||
|
host_port_parts = host_port.split(':')
|
||||||
|
if len(host_port_parts) == 1:
|
||||||
|
# No port number, assign a default port
|
||||||
|
hostname = host_port_parts[0]
|
||||||
|
port = default_ports.get(driver)
|
||||||
|
elif len(host_port_parts) == 2:
|
||||||
|
# Host name and port number
|
||||||
|
hostname, port = host_port_parts
|
||||||
|
try:
|
||||||
|
port = int(port)
|
||||||
|
except (ValueError, TypeError) as e:
|
||||||
|
raise DatabaseConnectionError(f'Database connection string contained an '
|
||||||
|
f'invalid port, ports must be integers: '
|
||||||
|
f'{e}') from e
|
||||||
|
if not 0 < port < 63336:
|
||||||
|
raise DatabaseConnectionError(f'Database connection string contained an '
|
||||||
|
f'invalid port, ports must be between 1 and '
|
||||||
|
f'65535, got {port}')
|
||||||
|
else:
|
||||||
|
# Malformed
|
||||||
|
raise DatabaseConnectionError(f'Database connection host must be a hostname or '
|
||||||
|
f'a hostname:port combination')
|
||||||
|
if database.startswith('/'):
|
||||||
|
database = database[1:]
|
||||||
|
if not database:
|
||||||
|
raise DatabaseConnectionError(f'Database connection string path must be a '
|
||||||
|
f'string in the format of /databasename')
|
||||||
|
if '/' in database:
|
||||||
|
raise DatabaseConnectionError(f'Database connection string path can only '
|
||||||
|
f'contain a single string name, got: {database}')
|
||||||
|
return {
|
||||||
|
'DRIVER': driver,
|
||||||
|
'ENGINE': django_driver,
|
||||||
|
'NAME': database,
|
||||||
|
'USER': username,
|
||||||
|
'PASSWORD': password,
|
||||||
|
'HOST': hostname,
|
||||||
|
'PORT': port,
|
||||||
|
'CONN_MAX_AGE': 300,
|
||||||
|
'OPTIONS': backend_options.get(driver),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def get_client_ip(request):
|
def get_client_ip(request):
|
||||||
|
@ -14,3 +106,26 @@ def append_uri_params(uri, params):
|
||||||
uri = str(uri)
|
uri = str(uri)
|
||||||
qs = urlencode(params)
|
qs = urlencode(params)
|
||||||
return urlunsplit(('', '', uri, qs, ''))
|
return urlunsplit(('', '', uri, qs, ''))
|
||||||
|
|
||||||
|
|
||||||
|
def clean_filename(filename):
|
||||||
|
if not isinstance(filename, str):
|
||||||
|
raise ValueError(f'filename must be a str, got {type(filename)}')
|
||||||
|
to_scrub = '<>\/:*?"|%'
|
||||||
|
for char in to_scrub:
|
||||||
|
filename = filename.replace(char, '')
|
||||||
|
clean_filename = ''
|
||||||
|
for c in filename:
|
||||||
|
if c in string.whitespace:
|
||||||
|
c = ' '
|
||||||
|
if ord(c) > 30:
|
||||||
|
clean_filename += c
|
||||||
|
return clean_filename.strip()
|
||||||
|
|
||||||
|
|
||||||
|
def json_serial(obj):
|
||||||
|
if isinstance(obj, datetime):
|
||||||
|
return obj.isoformat()
|
||||||
|
if isinstance(obj, LazyList):
|
||||||
|
return list(obj)
|
||||||
|
raise TypeError(f'Type {type(obj)} is not json_serial()-able')
|
||||||
|
|
1183
tubesync/spam
1183
tubesync/spam
File diff suppressed because it is too large
Load Diff
|
@ -7,7 +7,7 @@ class SourceAdmin(admin.ModelAdmin):
|
||||||
|
|
||||||
ordering = ('-created',)
|
ordering = ('-created',)
|
||||||
list_display = ('uuid', 'name', 'source_type', 'last_crawl',
|
list_display = ('uuid', 'name', 'source_type', 'last_crawl',
|
||||||
'has_failed')
|
'download_media', 'has_failed')
|
||||||
readonly_fields = ('uuid', 'created')
|
readonly_fields = ('uuid', 'created')
|
||||||
search_fields = ('uuid', 'key', 'name')
|
search_fields = ('uuid', 'key', 'name')
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,109 @@
|
||||||
|
from django.forms import MultipleChoiceField, CheckboxSelectMultiple, Field, TypedMultipleChoiceField
|
||||||
|
from django.db import models
|
||||||
|
from typing import Any, Optional, Dict
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
|
# this is a form field!
|
||||||
|
class CustomCheckboxSelectMultiple(CheckboxSelectMultiple):
|
||||||
|
template_name = 'widgets/checkbox_select.html'
|
||||||
|
option_template_name = 'widgets/checkbox_option.html'
|
||||||
|
|
||||||
|
def get_context(self, name: str, value: Any, attrs) -> Dict[str, Any]:
|
||||||
|
ctx = super().get_context(name, value, attrs)['widget']
|
||||||
|
ctx["multipleChoiceProperties"] = []
|
||||||
|
for _group, options, _index in ctx["optgroups"]:
|
||||||
|
for option in options:
|
||||||
|
if not isinstance(value,str) and not isinstance(value,list) and ( option["value"] in value.selected_choices or ( value.allow_all and value.all_choice in value.selected_choices ) ):
|
||||||
|
checked = True
|
||||||
|
else:
|
||||||
|
checked = False
|
||||||
|
|
||||||
|
ctx["multipleChoiceProperties"].append({
|
||||||
|
"template_name": option["template_name"],
|
||||||
|
"type": option["type"],
|
||||||
|
"value": option["value"],
|
||||||
|
"label": option["label"],
|
||||||
|
"name": option["name"],
|
||||||
|
"checked": checked})
|
||||||
|
|
||||||
|
return { 'widget': ctx }
|
||||||
|
|
||||||
|
# this is a database field!
|
||||||
|
class CommaSepChoiceField(models.Field):
|
||||||
|
"Implements comma-separated storage of lists"
|
||||||
|
|
||||||
|
def __init__(self, separator=",", possible_choices=(("","")), all_choice="", all_label="All", allow_all=False, *args, **kwargs):
|
||||||
|
self.separator = separator
|
||||||
|
self.possible_choices = possible_choices
|
||||||
|
self.selected_choices = []
|
||||||
|
self.allow_all = allow_all
|
||||||
|
self.all_label = all_label
|
||||||
|
self.all_choice = all_choice
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def deconstruct(self):
|
||||||
|
name, path, args, kwargs = super().deconstruct()
|
||||||
|
if self.separator != ",":
|
||||||
|
kwargs['separator'] = self.separator
|
||||||
|
kwargs['possible_choices'] = self.possible_choices
|
||||||
|
return name, path, args, kwargs
|
||||||
|
|
||||||
|
def db_type(self, connection):
|
||||||
|
return 'text'
|
||||||
|
|
||||||
|
def get_my_choices(self):
|
||||||
|
choiceArray = []
|
||||||
|
if self.possible_choices is None:
|
||||||
|
return choiceArray
|
||||||
|
if self.allow_all:
|
||||||
|
choiceArray.append((self.all_choice, _(self.all_label)))
|
||||||
|
|
||||||
|
for t in self.possible_choices:
|
||||||
|
choiceArray.append(t)
|
||||||
|
|
||||||
|
return choiceArray
|
||||||
|
|
||||||
|
def formfield(self, **kwargs):
|
||||||
|
# This is a fairly standard way to set up some defaults
|
||||||
|
# while letting the caller override them.
|
||||||
|
defaults = {'form_class': MultipleChoiceField,
|
||||||
|
'choices': self.get_my_choices,
|
||||||
|
'widget': CustomCheckboxSelectMultiple,
|
||||||
|
'label': '',
|
||||||
|
'required': False}
|
||||||
|
defaults.update(kwargs)
|
||||||
|
#del defaults.required
|
||||||
|
return super().formfield(**defaults)
|
||||||
|
|
||||||
|
def deconstruct(self):
|
||||||
|
name, path, args, kwargs = super().deconstruct()
|
||||||
|
# Only include kwarg if it's not the default
|
||||||
|
if self.separator != ",":
|
||||||
|
kwargs['separator'] = self.separator
|
||||||
|
return name, path, args, kwargs
|
||||||
|
|
||||||
|
def from_db_value(self, value, expr, conn):
|
||||||
|
if value is None:
|
||||||
|
self.selected_choices = []
|
||||||
|
else:
|
||||||
|
self.selected_choices = value.split(",")
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
def get_prep_value(self, value):
|
||||||
|
if value is None:
|
||||||
|
return ""
|
||||||
|
if not isinstance(value,list):
|
||||||
|
return ""
|
||||||
|
|
||||||
|
if self.all_choice not in value:
|
||||||
|
return ",".join(value)
|
||||||
|
else:
|
||||||
|
return self.all_choice
|
||||||
|
|
||||||
|
def get_text_for_value(self, val):
|
||||||
|
fval = [i for i in self.possible_choices if i[0] == val]
|
||||||
|
if len(fval) <= 0:
|
||||||
|
return []
|
||||||
|
else:
|
||||||
|
return fval[0][1]
|
|
@ -0,0 +1,51 @@
|
||||||
|
import os
|
||||||
|
import uuid
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from django.db.models import signals
|
||||||
|
from common.logger import log
|
||||||
|
from sync.models import Source, Media, MediaServer
|
||||||
|
from sync.signals import media_post_delete
|
||||||
|
from sync.tasks import rescan_media_server
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
|
||||||
|
help = ('Deletes a source by UUID')
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument('--source', action='store', required=True, help='Source UUID')
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
source_uuid_str = options.get('source', '')
|
||||||
|
try:
|
||||||
|
source_uuid = uuid.UUID(source_uuid_str)
|
||||||
|
except Exception as e:
|
||||||
|
raise CommandError(f'Failed to parse source UUID: {e}')
|
||||||
|
log.info(f'Deleting source with UUID: {source_uuid}')
|
||||||
|
# Fetch the source by UUID
|
||||||
|
try:
|
||||||
|
source = Source.objects.get(uuid=source_uuid)
|
||||||
|
except Source.DoesNotExist:
|
||||||
|
raise CommandError(f'Source does not exist with '
|
||||||
|
f'UUID: {source_uuid}')
|
||||||
|
# Detach post-delete signal for Media so we don't spam media servers
|
||||||
|
signals.post_delete.disconnect(media_post_delete, sender=Media)
|
||||||
|
# Delete the source, triggering pre-delete signals for each media item
|
||||||
|
log.info(f'Found source with UUID "{source.uuid}" with name '
|
||||||
|
f'"{source.name}" and deleting it, this may take some time!')
|
||||||
|
source.delete()
|
||||||
|
# Update any media servers
|
||||||
|
for mediaserver in MediaServer.objects.all():
|
||||||
|
log.info(f'Scheduling media server updates')
|
||||||
|
verbose_name = _('Request media server rescan for "{}"')
|
||||||
|
rescan_media_server(
|
||||||
|
str(mediaserver.pk),
|
||||||
|
priority=0,
|
||||||
|
verbose_name=verbose_name.format(mediaserver),
|
||||||
|
remove_existing_tasks=True
|
||||||
|
)
|
||||||
|
# Re-attach signals
|
||||||
|
signals.post_delete.connect(media_post_delete, sender=Media)
|
||||||
|
# All done
|
||||||
|
log.info('Done')
|
|
@ -0,0 +1,55 @@
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from common.logger import log
|
||||||
|
from sync.models import Source, Media
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
|
||||||
|
help = ('Scans download media directories for media not yet downloaded and ',
|
||||||
|
'marks them as downloaded')
|
||||||
|
extra_extensions = ['mp3', 'mp4', 'avi']
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
log.info('Building directory to Source map...')
|
||||||
|
dirmap = {}
|
||||||
|
for s in Source.objects.all():
|
||||||
|
dirmap[s.directory_path] = s
|
||||||
|
log.info(f'Scanning sources...')
|
||||||
|
file_extensions = list(Source.EXTENSIONS) + self.extra_extensions
|
||||||
|
for sourceroot, source in dirmap.items():
|
||||||
|
media = list(Media.objects.filter(source=source, downloaded=False,
|
||||||
|
skip=False))
|
||||||
|
if not media:
|
||||||
|
log.info(f'Source "{source}" has no missing media')
|
||||||
|
continue
|
||||||
|
log.info(f'Scanning Source "{source}" directory for media to '
|
||||||
|
f'import: {sourceroot}, looking for {len(media)} '
|
||||||
|
f'undownloaded and unskipped items')
|
||||||
|
on_disk = []
|
||||||
|
for (root, dirs, files) in os.walk(sourceroot):
|
||||||
|
rootpath = Path(root)
|
||||||
|
for filename in files:
|
||||||
|
filepart, ext = os.path.splitext(filename)
|
||||||
|
if ext.startswith('.'):
|
||||||
|
ext = ext[1:]
|
||||||
|
ext = ext.strip().lower()
|
||||||
|
if ext not in file_extensions:
|
||||||
|
continue
|
||||||
|
on_disk.append(str(rootpath / filename))
|
||||||
|
filemap = {}
|
||||||
|
for item in media:
|
||||||
|
for filepath in on_disk:
|
||||||
|
if item.key in filepath:
|
||||||
|
# The unique item key is in the file name on disk, map it to
|
||||||
|
# the undownloaded media item
|
||||||
|
filemap[filepath] = item
|
||||||
|
continue
|
||||||
|
for filepath, item in filemap.items():
|
||||||
|
log.info(f'Matched on-disk file: {filepath} '
|
||||||
|
f'to media item: {item.source} / {item}')
|
||||||
|
item.media_file.name = filepath
|
||||||
|
item.downloaded = True
|
||||||
|
item.save()
|
||||||
|
log.info('Done')
|
|
@ -0,0 +1,15 @@
|
||||||
|
import os
|
||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from common.logger import log
|
||||||
|
from sync.models import Source, Media, MediaServer
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
|
||||||
|
help = ('Lists sources')
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
log.info('Listing sources...')
|
||||||
|
for source in Source.objects.all():
|
||||||
|
log.info(f' - {source.uuid}: {source.name}')
|
||||||
|
log.info('Done')
|
|
@ -0,0 +1,19 @@
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from sync.models import Media
|
||||||
|
|
||||||
|
|
||||||
|
from common.logger import log
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
|
||||||
|
help = 'Resets all media item metadata'
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
log.info('Resettings all media metadata...')
|
||||||
|
# Delete all metadata
|
||||||
|
Media.objects.update(metadata=None)
|
||||||
|
# Trigger the save signal on each media item
|
||||||
|
for item in Media.objects.all():
|
||||||
|
item.save()
|
||||||
|
log.info('Done')
|
|
@ -0,0 +1,33 @@
|
||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from background_task.models import Task
|
||||||
|
from sync.models import Source
|
||||||
|
from sync.tasks import index_source_task
|
||||||
|
|
||||||
|
|
||||||
|
from common.logger import log
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
|
||||||
|
help = 'Resets all tasks'
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
log.info('Resettings all tasks...')
|
||||||
|
# Delete all tasks
|
||||||
|
Task.objects.all().delete()
|
||||||
|
# Iter all tasks
|
||||||
|
for source in Source.objects.all():
|
||||||
|
# Recreate the initial indexing task
|
||||||
|
log.info(f'Resetting tasks for source: {source}')
|
||||||
|
verbose_name = _('Index media from source "{}"')
|
||||||
|
index_source_task(
|
||||||
|
str(source.pk),
|
||||||
|
repeat=source.index_schedule,
|
||||||
|
queue=str(source.pk),
|
||||||
|
priority=5,
|
||||||
|
verbose_name=verbose_name.format(source.name)
|
||||||
|
)
|
||||||
|
# This also chains down to call each Media objects .save() as well
|
||||||
|
source.save()
|
||||||
|
log.info('Done')
|
|
@ -0,0 +1,34 @@
|
||||||
|
import os
|
||||||
|
from shutil import copyfile
|
||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from django.db.models import Q
|
||||||
|
from common.logger import log
|
||||||
|
from sync.models import Source, Media
|
||||||
|
from sync.utils import write_text_file
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
|
||||||
|
help = 'Syncs missing metadata (such as nfo files) if source settings are updated'
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
log.info('Syncing missing metadata...')
|
||||||
|
sources = Source.objects.filter(Q(copy_thumbnails=True) | Q(write_nfo=True))
|
||||||
|
for source in sources.order_by('name'):
|
||||||
|
log.info(f'Finding media for source: {source}')
|
||||||
|
for item in Media.objects.filter(source=source, downloaded=True):
|
||||||
|
log.info(f'Checking media for missing metadata: {source} / {item}')
|
||||||
|
thumbpath = item.thumbpath
|
||||||
|
if not thumbpath.is_file():
|
||||||
|
if item.thumb:
|
||||||
|
log.info(f'Copying missing thumbnail from: {item.thumb.path} '
|
||||||
|
f'to: {thumbpath}')
|
||||||
|
copyfile(item.thumb.path, thumbpath)
|
||||||
|
else:
|
||||||
|
log.error(f'Tried to copy missing thumbnail for {item} but '
|
||||||
|
f'the thumbnail has not been downloaded')
|
||||||
|
nfopath = item.nfopath
|
||||||
|
if not nfopath.is_file():
|
||||||
|
log.info(f'Writing missing NFO file: {nfopath}')
|
||||||
|
write_text_file(nfopath, item.nfoxml)
|
||||||
|
log.info('Done')
|
|
@ -1,6 +1,7 @@
|
||||||
import json
|
import json
|
||||||
from django.core.management.base import BaseCommand, CommandError
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
from sync.youtube import get_media_info
|
from sync.youtube import get_media_info
|
||||||
|
from common.utils import json_serial
|
||||||
|
|
||||||
|
|
||||||
class Command(BaseCommand):
|
class Command(BaseCommand):
|
||||||
|
@ -14,5 +15,6 @@ class Command(BaseCommand):
|
||||||
url = options['url']
|
url = options['url']
|
||||||
self.stdout.write(f'Showing information for URL: {url}')
|
self.stdout.write(f'Showing information for URL: {url}')
|
||||||
info = get_media_info(url)
|
info = get_media_info(url)
|
||||||
self.stdout.write(json.dumps(info, indent=4, sort_keys=True))
|
d = json.dumps(info, indent=4, sort_keys=True, default=json_serial)
|
||||||
|
self.stdout.write(d)
|
||||||
self.stdout.write('Done')
|
self.stdout.write('Done')
|
||||||
|
|
|
@ -53,6 +53,8 @@ def get_best_audio_format(media):
|
||||||
# If the format has a video stream, skip it
|
# If the format has a video stream, skip it
|
||||||
if fmt['vcodec'] is not None:
|
if fmt['vcodec'] is not None:
|
||||||
continue
|
continue
|
||||||
|
if not fmt['acodec']:
|
||||||
|
continue
|
||||||
audio_formats.append(fmt)
|
audio_formats.append(fmt)
|
||||||
audio_formats = list(reversed(sorted(audio_formats, key=lambda k: k['abr'])))
|
audio_formats = list(reversed(sorted(audio_formats, key=lambda k: k['abr'])))
|
||||||
if not audio_formats:
|
if not audio_formats:
|
||||||
|
@ -66,7 +68,7 @@ def get_best_audio_format(media):
|
||||||
# No codecs matched
|
# No codecs matched
|
||||||
if media.source.can_fallback:
|
if media.source.can_fallback:
|
||||||
# Can fallback, find the next highest bitrate non-matching codec
|
# Can fallback, find the next highest bitrate non-matching codec
|
||||||
return False, audio_formats[0]
|
return False, audio_formats[0]['id']
|
||||||
else:
|
else:
|
||||||
# Can't fallback
|
# Can't fallback
|
||||||
return False, False
|
return False, False
|
||||||
|
@ -88,6 +90,8 @@ def get_best_video_format(media):
|
||||||
# If the format has an audio stream, skip it
|
# If the format has an audio stream, skip it
|
||||||
if fmt['acodec'] is not None:
|
if fmt['acodec'] is not None:
|
||||||
continue
|
continue
|
||||||
|
if not fmt['vcodec']:
|
||||||
|
continue
|
||||||
if media.source.source_resolution.strip().upper() == fmt['format']:
|
if media.source.source_resolution.strip().upper() == fmt['format']:
|
||||||
video_formats.append(fmt)
|
video_formats.append(fmt)
|
||||||
# Check we matched some streams
|
# Check we matched some streams
|
||||||
|
|
|
@ -44,7 +44,9 @@ class PlexMediaServer(MediaServer):
|
||||||
'<p>The <strong>libraries</strong> is a comma-separated list of Plex '
|
'<p>The <strong>libraries</strong> is a comma-separated list of Plex '
|
||||||
'library or section IDs, you can find out how to get your library or '
|
'library or section IDs, you can find out how to get your library or '
|
||||||
'section IDs <a href="https://support.plex.tv/articles/201242707-plex-'
|
'section IDs <a href="https://support.plex.tv/articles/201242707-plex-'
|
||||||
'media-scanner-via-command-line/#toc-1" target="_blank">here</a>.</p>')
|
'media-scanner-via-command-line/#toc-1" target="_blank">here</a> or '
|
||||||
|
'<a href="https://www.plexopedia.com/plex-media-server/api/server/libraries/" '
|
||||||
|
'target="_blank">here</a></p>.')
|
||||||
|
|
||||||
def make_request(self, uri='/', params={}):
|
def make_request(self, uri='/', params={}):
|
||||||
headers = {'User-Agent': 'TubeSync'}
|
headers = {'User-Agent': 'TubeSync'}
|
||||||
|
@ -124,7 +126,7 @@ class PlexMediaServer(MediaServer):
|
||||||
# Seems we have a valid library sections page, get the library IDs
|
# Seems we have a valid library sections page, get the library IDs
|
||||||
remote_libraries = {}
|
remote_libraries = {}
|
||||||
try:
|
try:
|
||||||
for parent in parsed_response.getiterator('MediaContainer'):
|
for parent in parsed_response.iter('MediaContainer'):
|
||||||
for d in parent:
|
for d in parent:
|
||||||
library_id = d.attrib['key']
|
library_id = d.attrib['key']
|
||||||
library_name = d.attrib['title']
|
library_name = d.attrib['title']
|
||||||
|
|
|
@ -0,0 +1,30 @@
|
||||||
|
# Generated by Django 3.1.6 on 2021-02-18 04:42
|
||||||
|
|
||||||
|
import django.core.files.storage
|
||||||
|
from django.db import migrations, models
|
||||||
|
import sync.models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('sync', '0008_source_download_cap'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='source',
|
||||||
|
name='download_media',
|
||||||
|
field=models.BooleanField(default=True, help_text='Download media from this source, if not selected the source will only be indexed', verbose_name='download media'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='media',
|
||||||
|
name='media_file',
|
||||||
|
field=models.FileField(blank=True, help_text='Media file', max_length=200, null=True, storage=django.core.files.storage.FileSystemStorage(location='/home/meeb/Repos/github.com/meeb/tubesync/tubesync/downloads'), upload_to=sync.models.get_media_file_path, verbose_name='media file'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='source',
|
||||||
|
name='media_format',
|
||||||
|
field=models.CharField(default='{yyyymmdd}_{source}_{title}_{key}_{format}.{ext}', help_text='File format to use for saving files, detailed options at bottom of page.', max_length=200, verbose_name='media format'),
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,30 @@
|
||||||
|
# Generated by Django 3.2.7 on 2021-09-24 05:54
|
||||||
|
|
||||||
|
import django.core.files.storage
|
||||||
|
from django.db import migrations, models
|
||||||
|
import sync.models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('sync', '0009_auto_20210218_0442'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='media',
|
||||||
|
name='media_file',
|
||||||
|
field=models.FileField(blank=True, help_text='Media file', max_length=255, null=True, storage=django.core.files.storage.FileSystemStorage(location='/home/meeb/Repos/github.com/meeb/tubesync/tubesync/downloads'), upload_to=sync.models.get_media_file_path, verbose_name='media file'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='source',
|
||||||
|
name='index_schedule',
|
||||||
|
field=models.IntegerField(choices=[(3600, 'Every hour'), (7200, 'Every 2 hours'), (10800, 'Every 3 hours'), (14400, 'Every 4 hours'), (18000, 'Every 5 hours'), (21600, 'Every 6 hours'), (43200, 'Every 12 hours'), (86400, 'Every 24 hours'), (259200, 'Every 3 days'), (604800, 'Every 7 days'), (0, 'Never')], db_index=True, default=86400, help_text='Schedule of how often to index the source for new media', verbose_name='index schedule'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='source',
|
||||||
|
name='media_format',
|
||||||
|
field=models.CharField(default='{yyyy_mm_dd}_{source}_{title}_{key}_{format}.{ext}', help_text='File format to use for saving files, detailed options at bottom of page.', max_length=200, verbose_name='media format'),
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,21 @@
|
||||||
|
# Generated by Django 3.2.11 on 2022-02-01 16:54
|
||||||
|
|
||||||
|
import django.core.files.storage
|
||||||
|
from django.db import migrations, models
|
||||||
|
import sync.models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('sync', '0010_auto_20210924_0554'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='source',
|
||||||
|
name='write_json',
|
||||||
|
field=models.BooleanField(
|
||||||
|
default=False, help_text='Write a JSON file with the media info, these may be detected and used by some media servers', verbose_name='write json'),
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,18 @@
|
||||||
|
# Generated by Django 3.2.12 on 2022-04-06 06:19
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('sync', '0011_auto_20220201_1654'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='media',
|
||||||
|
name='downloaded_format',
|
||||||
|
field=models.CharField(blank=True, help_text='Video format (resolution) of the downloaded media', max_length=30, null=True, verbose_name='downloaded format'),
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,25 @@
|
||||||
|
# Generated by Django 3.2.12 on 2022-04-06 06:19
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
def fix_media_file(apps, schema_editor):
|
||||||
|
Media = apps.get_model('sync', 'Media')
|
||||||
|
for media in Media.objects.filter(downloaded=True):
|
||||||
|
download_dir = str(settings.DOWNLOAD_ROOT)
|
||||||
|
|
||||||
|
if media.media_file.name.startswith(download_dir):
|
||||||
|
media.media_file.name = media.media_file.name[len(download_dir) + 1:]
|
||||||
|
media.save()
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('sync', '0012_alter_media_downloaded_format'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RunPython(fix_media_file)
|
||||||
|
]
|
|
@ -0,0 +1,21 @@
|
||||||
|
# Generated by Django 3.2.15 on 2022-12-28 20:33
|
||||||
|
|
||||||
|
import django.core.files.storage
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
import sync.models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('sync', '0013_fix_elative_media_file'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='media',
|
||||||
|
name='media_file',
|
||||||
|
field=models.FileField(blank=True, help_text='Media file', max_length=255, null=True, storage=django.core.files.storage.FileSystemStorage(base_url='/media-data/', location=str(settings.DOWNLOAD_ROOT)), upload_to=sync.models.get_media_file_path, verbose_name='media file'),
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,23 @@
|
||||||
|
# Generated by Django 3.2.17 on 2023-02-13 06:03
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('sync', '0014_alter_media_media_file'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='media',
|
||||||
|
name='manual_skip',
|
||||||
|
field=models.BooleanField(db_index=True, default=False, help_text='Media marked as "skipped", won\' be downloaded', verbose_name='manual_skip'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='media',
|
||||||
|
name='skip',
|
||||||
|
field=models.BooleanField(db_index=True, default=False, help_text='INTERNAL FLAG - Media will be skipped and not downloaded', verbose_name='skip'),
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,34 @@
|
||||||
|
# Generated by Django 3.2.18 on 2023-02-14 20:52
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import sync.models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('sync', '0015_auto_20230213_0603'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='source',
|
||||||
|
name='embed_metadata',
|
||||||
|
field=models.BooleanField(default=False, help_text='Embed metadata from source into file', verbose_name='embed metadata'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='source',
|
||||||
|
name='embed_thumbnail',
|
||||||
|
field=models.BooleanField(default=False, help_text='Embed thumbnail into the file', verbose_name='embed thumbnail'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='source',
|
||||||
|
name='enable_sponsorblock',
|
||||||
|
field=models.BooleanField(default=True, help_text='Use SponsorBlock?', verbose_name='enable sponsorblock'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='source',
|
||||||
|
name='sponsorblock_categories',
|
||||||
|
field=sync.models.CommaSepChoiceField(default='all', possible_choices=(('all', 'All'), ('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section'))),
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,19 @@
|
||||||
|
# Generated by Django 3.2.18 on 2023-02-20 02:23
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
import sync.fields
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('sync', '0016_auto_20230214_2052'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='source',
|
||||||
|
name='sponsorblock_categories',
|
||||||
|
field=sync.fields.CommaSepChoiceField(default='all', help_text='Select the sponsorblocks you want to enforce', separator=''),
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,27 @@
|
||||||
|
# Generated by pac
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('sync', '0017_alter_source_sponsorblock_categories'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='source',
|
||||||
|
name='write_subtitles',
|
||||||
|
field=models.BooleanField(default=False, help_text='Download video subtitles', verbose_name='write subtitles'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='source',
|
||||||
|
name='auto_subtitles',
|
||||||
|
field=models.BooleanField(default=False, help_text='Accept auto-generated subtitles', verbose_name='accept auto subtitles'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='source',
|
||||||
|
name='sub_langs',
|
||||||
|
field=models.CharField(default='en', help_text='List of subtitles langs to download comma-separated. Example: en,fr',max_length=30),
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,17 @@
|
||||||
|
# Generated by pac
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('sync', '0018_source_subtitles'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='source',
|
||||||
|
name='delete_removed_media',
|
||||||
|
field=models.BooleanField(default=False, help_text='Delete media that is no longer on this playlist', verbose_name='delete removed media'),
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,29 @@
|
||||||
|
# Generated by Django 3.2.22 on 2023-10-24 17:25
|
||||||
|
|
||||||
|
import django.core.validators
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('sync', '0019_add_delete_removed_media'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='source',
|
||||||
|
name='filter_text',
|
||||||
|
field=models.CharField(blank=True, default='', help_text='Regex compatible filter string for video titles', max_length=100, verbose_name='filter string'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='source',
|
||||||
|
name='auto_subtitles',
|
||||||
|
field=models.BooleanField(default=False, help_text='Accept auto-generated subtitles', verbose_name='accept auto-generated subs'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='source',
|
||||||
|
name='sub_langs',
|
||||||
|
field=models.CharField(default='en', help_text='List of subtitles langs to download, comma-separated. Example: en,fr or all,-fr,-live_chat', max_length=30, validators=[django.core.validators.RegexValidator(message='Subtitle langs must be a comma-separated list of langs. example: en,fr or all,-fr,-live_chat', regex='^(\\-?[\\_\\.a-zA-Z]+,)*(\\-?[\\_\\.a-zA-Z]+){1}$')], verbose_name='subs langs'),
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,17 @@
|
||||||
|
# Generated by pac
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('sync', '0020_auto_20231024_1825'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='source',
|
||||||
|
name='delete_files_on_disk',
|
||||||
|
field=models.BooleanField(default=False, help_text='Delete files on disk when they are removed from TubeSync', verbose_name='delete files on disk'),
|
||||||
|
),
|
||||||
|
]
|
|
@ -1,6 +1,7 @@
|
||||||
import os
|
import os
|
||||||
import uuid
|
import uuid
|
||||||
import json
|
import json
|
||||||
|
import re
|
||||||
from xml.etree import ElementTree
|
from xml.etree import ElementTree
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
|
@ -8,20 +9,21 @@ from pathlib import Path
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.db import models
|
from django.db import models
|
||||||
from django.core.files.storage import FileSystemStorage
|
from django.core.files.storage import FileSystemStorage
|
||||||
|
from django.core.validators import RegexValidator
|
||||||
from django.utils.text import slugify
|
from django.utils.text import slugify
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from common.errors import NoFormatException
|
from common.errors import NoFormatException
|
||||||
|
from common.utils import clean_filename
|
||||||
from .youtube import (get_media_info as get_youtube_media_info,
|
from .youtube import (get_media_info as get_youtube_media_info,
|
||||||
download_media as download_youtube_media)
|
download_media as download_youtube_media)
|
||||||
from .utils import seconds_to_timestr, parse_media_format
|
from .utils import seconds_to_timestr, parse_media_format
|
||||||
from .matching import (get_best_combined_format, get_best_audio_format,
|
from .matching import (get_best_combined_format, get_best_audio_format,
|
||||||
get_best_video_format)
|
get_best_video_format)
|
||||||
from .mediaservers import PlexMediaServer
|
from .mediaservers import PlexMediaServer
|
||||||
|
from .fields import CommaSepChoiceField
|
||||||
|
|
||||||
|
media_file_storage = FileSystemStorage(location=str(settings.DOWNLOAD_ROOT), base_url='/media-data/')
|
||||||
media_file_storage = FileSystemStorage(location=str(settings.DOWNLOAD_ROOT))
|
|
||||||
|
|
||||||
|
|
||||||
class Source(models.Model):
|
class Source(models.Model):
|
||||||
'''
|
'''
|
||||||
|
@ -100,6 +102,48 @@ class Source(models.Model):
|
||||||
(FALLBACK_NEXT_BEST_HD, _('Get next best resolution but at least HD'))
|
(FALLBACK_NEXT_BEST_HD, _('Get next best resolution but at least HD'))
|
||||||
)
|
)
|
||||||
|
|
||||||
|
EXTENSION_M4A = 'm4a'
|
||||||
|
EXTENSION_OGG = 'ogg'
|
||||||
|
EXTENSION_MKV = 'mkv'
|
||||||
|
EXTENSIONS = (EXTENSION_M4A, EXTENSION_OGG, EXTENSION_MKV)
|
||||||
|
|
||||||
|
# as stolen from: https://wiki.sponsor.ajay.app/w/Types / https://github.com/yt-dlp/yt-dlp/blob/master/yt_dlp/postprocessor/sponsorblock.py
|
||||||
|
SPONSORBLOCK_CATEGORIES_CHOICES = (
|
||||||
|
('sponsor', 'Sponsor'),
|
||||||
|
('intro', 'Intermission/Intro Animation'),
|
||||||
|
('outro', 'Endcards/Credits'),
|
||||||
|
('selfpromo', 'Unpaid/Self Promotion'),
|
||||||
|
('preview', 'Preview/Recap'),
|
||||||
|
('filler', 'Filler Tangent'),
|
||||||
|
('interaction', 'Interaction Reminder'),
|
||||||
|
('music_offtopic', 'Non-Music Section'),
|
||||||
|
)
|
||||||
|
|
||||||
|
sponsorblock_categories = CommaSepChoiceField(
|
||||||
|
_(''),
|
||||||
|
possible_choices=SPONSORBLOCK_CATEGORIES_CHOICES,
|
||||||
|
all_choice='all',
|
||||||
|
allow_all=True,
|
||||||
|
all_label='(all options)',
|
||||||
|
default='all',
|
||||||
|
help_text=_('Select the sponsorblocks you want to enforce')
|
||||||
|
)
|
||||||
|
embed_metadata = models.BooleanField(
|
||||||
|
_('embed metadata'),
|
||||||
|
default=False,
|
||||||
|
help_text=_('Embed metadata from source into file')
|
||||||
|
)
|
||||||
|
embed_thumbnail = models.BooleanField(
|
||||||
|
_('embed thumbnail'),
|
||||||
|
default=False,
|
||||||
|
help_text=_('Embed thumbnail into the file')
|
||||||
|
)
|
||||||
|
enable_sponsorblock = models.BooleanField(
|
||||||
|
_('enable sponsorblock'),
|
||||||
|
default=True,
|
||||||
|
help_text=_('Use SponsorBlock?')
|
||||||
|
)
|
||||||
|
|
||||||
# Fontawesome icons used for the source on the front end
|
# Fontawesome icons used for the source on the front end
|
||||||
ICONS = {
|
ICONS = {
|
||||||
SOURCE_TYPE_YOUTUBE_CHANNEL: '<i class="fab fa-youtube"></i>',
|
SOURCE_TYPE_YOUTUBE_CHANNEL: '<i class="fab fa-youtube"></i>',
|
||||||
|
@ -112,6 +156,12 @@ class Source(models.Model):
|
||||||
SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'https://www.youtube.com/channel/{key}',
|
SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'https://www.youtube.com/channel/{key}',
|
||||||
SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/playlist?list={key}',
|
SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/playlist?list={key}',
|
||||||
}
|
}
|
||||||
|
# Format used to create indexable URLs
|
||||||
|
INDEX_URLS = {
|
||||||
|
SOURCE_TYPE_YOUTUBE_CHANNEL: 'https://www.youtube.com/c/{key}/videos',
|
||||||
|
SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'https://www.youtube.com/channel/{key}/videos',
|
||||||
|
SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/playlist?list={key}',
|
||||||
|
}
|
||||||
# Callback functions to get a list of media from the source
|
# Callback functions to get a list of media from the source
|
||||||
INDEXERS = {
|
INDEXERS = {
|
||||||
SOURCE_TYPE_YOUTUBE_CHANNEL: get_youtube_media_info,
|
SOURCE_TYPE_YOUTUBE_CHANNEL: get_youtube_media_info,
|
||||||
|
@ -146,6 +196,9 @@ class Source(models.Model):
|
||||||
EVERY_6_HOURS = 21600, _('Every 6 hours')
|
EVERY_6_HOURS = 21600, _('Every 6 hours')
|
||||||
EVERY_12_HOURS = 43200, _('Every 12 hours')
|
EVERY_12_HOURS = 43200, _('Every 12 hours')
|
||||||
EVERY_24_HOURS = 86400, _('Every 24 hours')
|
EVERY_24_HOURS = 86400, _('Every 24 hours')
|
||||||
|
EVERY_3_DAYS = 259200, _('Every 3 days')
|
||||||
|
EVERY_7_DAYS = 604800, _('Every 7 days')
|
||||||
|
NEVER = 0, _('Never')
|
||||||
|
|
||||||
uuid = models.UUIDField(
|
uuid = models.UUIDField(
|
||||||
_('uuid'),
|
_('uuid'),
|
||||||
|
@ -200,15 +253,20 @@ class Source(models.Model):
|
||||||
_('media format'),
|
_('media format'),
|
||||||
max_length=200,
|
max_length=200,
|
||||||
default=settings.MEDIA_FORMATSTR_DEFAULT,
|
default=settings.MEDIA_FORMATSTR_DEFAULT,
|
||||||
help_text=_('File format to use for saving files')
|
help_text=_('File format to use for saving files, detailed options at bottom of page.')
|
||||||
)
|
)
|
||||||
index_schedule = models.IntegerField(
|
index_schedule = models.IntegerField(
|
||||||
_('index schedule'),
|
_('index schedule'),
|
||||||
choices=IndexSchedule.choices,
|
choices=IndexSchedule.choices,
|
||||||
db_index=True,
|
db_index=True,
|
||||||
default=IndexSchedule.EVERY_6_HOURS,
|
default=IndexSchedule.EVERY_24_HOURS,
|
||||||
help_text=_('Schedule of how often to index the source for new media')
|
help_text=_('Schedule of how often to index the source for new media')
|
||||||
)
|
)
|
||||||
|
download_media = models.BooleanField(
|
||||||
|
_('download media'),
|
||||||
|
default=True,
|
||||||
|
help_text=_('Download media from this source, if not selected the source will only be indexed')
|
||||||
|
)
|
||||||
download_cap = models.IntegerField(
|
download_cap = models.IntegerField(
|
||||||
_('download cap'),
|
_('download cap'),
|
||||||
choices=CapChoices.choices,
|
choices=CapChoices.choices,
|
||||||
|
@ -226,6 +284,23 @@ class Source(models.Model):
|
||||||
help_text=_('If "delete old media" is ticked, the number of days after which '
|
help_text=_('If "delete old media" is ticked, the number of days after which '
|
||||||
'to automatically delete media')
|
'to automatically delete media')
|
||||||
)
|
)
|
||||||
|
filter_text = models.CharField(
|
||||||
|
_('filter string'),
|
||||||
|
max_length=100,
|
||||||
|
default='',
|
||||||
|
blank=True,
|
||||||
|
help_text=_('Regex compatible filter string for video titles')
|
||||||
|
)
|
||||||
|
delete_removed_media = models.BooleanField(
|
||||||
|
_('delete removed media'),
|
||||||
|
default=False,
|
||||||
|
help_text=_('Delete media that is no longer on this playlist')
|
||||||
|
)
|
||||||
|
delete_files_on_disk = models.BooleanField(
|
||||||
|
_('delete files on disk'),
|
||||||
|
default=False,
|
||||||
|
help_text=_('Delete files on disk when they are removed from TubeSync')
|
||||||
|
)
|
||||||
source_resolution = models.CharField(
|
source_resolution = models.CharField(
|
||||||
_('source resolution'),
|
_('source resolution'),
|
||||||
max_length=8,
|
max_length=8,
|
||||||
|
@ -278,12 +353,41 @@ class Source(models.Model):
|
||||||
default=False,
|
default=False,
|
||||||
help_text=_('Write an NFO file in XML with the media info, these may be detected and used by some media servers')
|
help_text=_('Write an NFO file in XML with the media info, these may be detected and used by some media servers')
|
||||||
)
|
)
|
||||||
|
write_json = models.BooleanField(
|
||||||
|
_('write json'),
|
||||||
|
default=False,
|
||||||
|
help_text=_('Write a JSON file with the media info, these may be detected and used by some media servers')
|
||||||
|
)
|
||||||
has_failed = models.BooleanField(
|
has_failed = models.BooleanField(
|
||||||
_('has failed'),
|
_('has failed'),
|
||||||
default=False,
|
default=False,
|
||||||
help_text=_('Source has failed to index media')
|
help_text=_('Source has failed to index media')
|
||||||
)
|
)
|
||||||
|
|
||||||
|
write_subtitles = models.BooleanField(
|
||||||
|
_('write subtitles'),
|
||||||
|
default=False,
|
||||||
|
help_text=_('Download video subtitles')
|
||||||
|
)
|
||||||
|
|
||||||
|
auto_subtitles = models.BooleanField(
|
||||||
|
_('accept auto-generated subs'),
|
||||||
|
default=False,
|
||||||
|
help_text=_('Accept auto-generated subtitles')
|
||||||
|
)
|
||||||
|
sub_langs = models.CharField(
|
||||||
|
_('subs langs'),
|
||||||
|
max_length=30,
|
||||||
|
default='en',
|
||||||
|
help_text=_('List of subtitles langs to download, comma-separated. Example: en,fr or all,-fr,-live_chat'),
|
||||||
|
validators=[
|
||||||
|
RegexValidator(
|
||||||
|
regex=r"^(\-?[\_\.a-zA-Z]+,)*(\-?[\_\.a-zA-Z]+){1}$",
|
||||||
|
message=_('Subtitle langs must be a comma-separated list of langs. example: en,fr or all,-fr,-live_chat')
|
||||||
|
)
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.name
|
return self.name
|
||||||
|
|
||||||
|
@ -327,23 +431,32 @@ class Source(models.Model):
|
||||||
'''
|
'''
|
||||||
if self.is_audio:
|
if self.is_audio:
|
||||||
if self.source_acodec == self.SOURCE_ACODEC_MP4A:
|
if self.source_acodec == self.SOURCE_ACODEC_MP4A:
|
||||||
return 'm4a'
|
return self.EXTENSION_M4A
|
||||||
elif self.source_acodec == self.SOURCE_ACODEC_OPUS:
|
elif self.source_acodec == self.SOURCE_ACODEC_OPUS:
|
||||||
return 'ogg'
|
return self.EXTENSION_OGG
|
||||||
else:
|
else:
|
||||||
raise ValueError('Unable to choose audio extension, uknown acodec')
|
raise ValueError('Unable to choose audio extension, uknown acodec')
|
||||||
else:
|
else:
|
||||||
return 'mkv'
|
return self.EXTENSION_MKV
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create_url(obj, source_type, key):
|
def create_url(obj, source_type, key):
|
||||||
url = obj.URLS.get(source_type)
|
url = obj.URLS.get(source_type)
|
||||||
return url.format(key=key)
|
return url.format(key=key)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def create_index_url(obj, source_type, key):
|
||||||
|
url = obj.INDEX_URLS.get(source_type)
|
||||||
|
return url.format(key=key)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def url(self):
|
def url(self):
|
||||||
return Source.create_url(self.source_type, self.key)
|
return Source.create_url(self.source_type, self.key)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def index_url(self):
|
||||||
|
return Source.create_index_url(self.source_type, self.key)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def format_summary(self):
|
def format_summary(self):
|
||||||
if self.source_resolution == Source.SOURCE_RESOLUTION_AUDIO:
|
if self.source_resolution == Source.SOURCE_RESOLUTION_AUDIO:
|
||||||
|
@ -358,10 +471,14 @@ class Source(models.Model):
|
||||||
@property
|
@property
|
||||||
def directory_path(self):
|
def directory_path(self):
|
||||||
download_dir = Path(media_file_storage.location)
|
download_dir = Path(media_file_storage.location)
|
||||||
|
return download_dir / self.type_directory_path
|
||||||
|
|
||||||
|
@property
|
||||||
|
def type_directory_path(self):
|
||||||
if self.source_resolution == self.SOURCE_RESOLUTION_AUDIO:
|
if self.source_resolution == self.SOURCE_RESOLUTION_AUDIO:
|
||||||
return download_dir / settings.DOWNLOAD_AUDIO_DIR / self.directory
|
return Path(settings.DOWNLOAD_AUDIO_DIR) / self.directory
|
||||||
else:
|
else:
|
||||||
return download_dir / settings.DOWNLOAD_VIDEO_DIR / self.directory
|
return Path(settings.DOWNLOAD_VIDEO_DIR) / self.directory
|
||||||
|
|
||||||
def make_directory(self):
|
def make_directory(self):
|
||||||
return os.makedirs(self.directory_path, exist_ok=True)
|
return os.makedirs(self.directory_path, exist_ok=True)
|
||||||
|
@ -399,19 +516,20 @@ class Source(models.Model):
|
||||||
fmt.append('60fps')
|
fmt.append('60fps')
|
||||||
if self.prefer_hdr:
|
if self.prefer_hdr:
|
||||||
fmt.append('hdr')
|
fmt.append('hdr')
|
||||||
|
now = timezone.now()
|
||||||
return {
|
return {
|
||||||
'yyyymmdd': timezone.now().strftime('%Y%m%d'),
|
'yyyymmdd': now.strftime('%Y%m%d'),
|
||||||
'yyyy_mm_dd': timezone.now().strftime('%Y-%m-%d'),
|
'yyyy_mm_dd': now.strftime('%Y-%m-%d'),
|
||||||
'yyyy': timezone.now().strftime('%Y'),
|
'yyyy': now.strftime('%Y'),
|
||||||
'mm': timezone.now().strftime('%m'),
|
'mm': now.strftime('%m'),
|
||||||
'dd': timezone.now().strftime('%d'),
|
'dd': now.strftime('%d'),
|
||||||
'source': self.slugname,
|
'source': self.slugname,
|
||||||
'source_full': self.name,
|
'source_full': self.name,
|
||||||
|
'uploader': 'Some Channel Name',
|
||||||
'title': 'some-media-title-name',
|
'title': 'some-media-title-name',
|
||||||
'title_full': 'Some Media Title Name',
|
'title_full': 'Some Media Title Name',
|
||||||
'key': 'SoMeUnIqUiD',
|
'key': 'SoMeUnIqUiD',
|
||||||
'format': '-'.join(fmt),
|
'format': '-'.join(fmt),
|
||||||
'playlist_index': 1,
|
|
||||||
'playlist_title': 'Some Playlist Title',
|
'playlist_title': 'Some Playlist Title',
|
||||||
'ext': self.extension,
|
'ext': self.extension,
|
||||||
'resolution': self.source_resolution if self.source_resolution else '',
|
'resolution': self.source_resolution if self.source_resolution else '',
|
||||||
|
@ -429,6 +547,11 @@ class Source(models.Model):
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return ''
|
return ''
|
||||||
|
|
||||||
|
def is_regex_match(self, media_item_title):
|
||||||
|
if not self.filter_text:
|
||||||
|
return True
|
||||||
|
return bool(re.search(self.filter_text, media_item_title))
|
||||||
|
|
||||||
def index_media(self):
|
def index_media(self):
|
||||||
'''
|
'''
|
||||||
Index the media source returning a list of media metadata as dicts.
|
Index the media source returning a list of media metadata as dicts.
|
||||||
|
@ -436,25 +559,14 @@ class Source(models.Model):
|
||||||
indexer = self.INDEXERS.get(self.source_type, None)
|
indexer = self.INDEXERS.get(self.source_type, None)
|
||||||
if not callable(indexer):
|
if not callable(indexer):
|
||||||
raise Exception(f'Source type f"{self.source_type}" has no indexer')
|
raise Exception(f'Source type f"{self.source_type}" has no indexer')
|
||||||
response = indexer(self.url)
|
response = indexer(self.index_url)
|
||||||
|
if not isinstance(response, dict):
|
||||||
|
return []
|
||||||
|
entries = response.get('entries', [])
|
||||||
|
|
||||||
# Account for nested playlists, such as a channel of playlists of playlists
|
if settings.MAX_ENTRIES_PROCESSING:
|
||||||
def _recurse_playlists(playlist):
|
entries = entries[:settings.MAX_ENTRIES_PROCESSING]
|
||||||
videos = []
|
return entries
|
||||||
if not playlist:
|
|
||||||
return videos
|
|
||||||
entries = playlist.get('entries', [])
|
|
||||||
for entry in entries:
|
|
||||||
if not entry:
|
|
||||||
continue
|
|
||||||
subentries = entry.get('entries', [])
|
|
||||||
if subentries:
|
|
||||||
videos = videos + _recurse_playlists(entry)
|
|
||||||
else:
|
|
||||||
videos.append(entry)
|
|
||||||
return videos
|
|
||||||
|
|
||||||
return _recurse_playlists(response)
|
|
||||||
|
|
||||||
|
|
||||||
def get_media_thumb_path(instance, filename):
|
def get_media_thumb_path(instance, filename):
|
||||||
|
@ -480,6 +592,12 @@ class Media(models.Model):
|
||||||
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'https://www.youtube.com/watch?v={key}',
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'https://www.youtube.com/watch?v={key}',
|
||||||
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/watch?v={key}',
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/watch?v={key}',
|
||||||
}
|
}
|
||||||
|
# Callback functions to get a list of media from the source
|
||||||
|
INDEXERS = {
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: get_youtube_media_info,
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: get_youtube_media_info,
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: get_youtube_media_info,
|
||||||
|
}
|
||||||
# Maps standardised names to names used in source metdata
|
# Maps standardised names to names used in source metdata
|
||||||
METADATA_FIELDS = {
|
METADATA_FIELDS = {
|
||||||
'upload_date': {
|
'upload_date': {
|
||||||
|
@ -542,11 +660,6 @@ class Media(models.Model):
|
||||||
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'dislike_count',
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'dislike_count',
|
||||||
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'dislike_count',
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'dislike_count',
|
||||||
},
|
},
|
||||||
'playlist_index': {
|
|
||||||
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'playlist_index',
|
|
||||||
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'playlist_index',
|
|
||||||
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'playlist_index',
|
|
||||||
},
|
|
||||||
'playlist_title': {
|
'playlist_title': {
|
||||||
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'playlist_title',
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'playlist_title',
|
||||||
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'playlist_title',
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'playlist_title',
|
||||||
|
@ -557,14 +670,18 @@ class Media(models.Model):
|
||||||
STATE_SCHEDULED = 'scheduled'
|
STATE_SCHEDULED = 'scheduled'
|
||||||
STATE_DOWNLOADING = 'downloading'
|
STATE_DOWNLOADING = 'downloading'
|
||||||
STATE_DOWNLOADED = 'downloaded'
|
STATE_DOWNLOADED = 'downloaded'
|
||||||
|
STATE_SKIPPED = 'skipped'
|
||||||
|
STATE_DISABLED_AT_SOURCE = 'source-disabled'
|
||||||
STATE_ERROR = 'error'
|
STATE_ERROR = 'error'
|
||||||
STATES = (STATE_UNKNOWN, STATE_SCHEDULED, STATE_DOWNLOADING, STATE_DOWNLOADED,
|
STATES = (STATE_UNKNOWN, STATE_SCHEDULED, STATE_DOWNLOADING, STATE_DOWNLOADED,
|
||||||
STATE_ERROR)
|
STATE_SKIPPED, STATE_DISABLED_AT_SOURCE, STATE_ERROR)
|
||||||
STATE_ICONS = {
|
STATE_ICONS = {
|
||||||
STATE_UNKNOWN: '<i class="far fa-question-circle" title="Unknown download state"></i>',
|
STATE_UNKNOWN: '<i class="far fa-question-circle" title="Unknown download state"></i>',
|
||||||
STATE_SCHEDULED: '<i class="far fa-clock" title="Scheduled to download"></i>',
|
STATE_SCHEDULED: '<i class="far fa-clock" title="Scheduled to download"></i>',
|
||||||
STATE_DOWNLOADING: '<i class="fas fa-download" title="Downloading now"></i>',
|
STATE_DOWNLOADING: '<i class="fas fa-download" title="Downloading now"></i>',
|
||||||
STATE_DOWNLOADED: '<i class="far fa-check-circle" title="Downloaded"></i>',
|
STATE_DOWNLOADED: '<i class="far fa-check-circle" title="Downloaded"></i>',
|
||||||
|
STATE_SKIPPED: '<i class="fas fa-exclamation-circle" title="Skipped"></i>',
|
||||||
|
STATE_DISABLED_AT_SOURCE: '<i class="fas fa-stop-circle" title="Media downloading disabled at source"></i>',
|
||||||
STATE_ERROR: '<i class="fas fa-exclamation-triangle" title="Error downloading"></i>',
|
STATE_ERROR: '<i class="fas fa-exclamation-triangle" title="Error downloading"></i>',
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -637,7 +754,7 @@ class Media(models.Model):
|
||||||
media_file = models.FileField(
|
media_file = models.FileField(
|
||||||
_('media file'),
|
_('media file'),
|
||||||
upload_to=get_media_file_path,
|
upload_to=get_media_file_path,
|
||||||
max_length=200,
|
max_length=255,
|
||||||
blank=True,
|
blank=True,
|
||||||
null=True,
|
null=True,
|
||||||
storage=media_file_storage,
|
storage=media_file_storage,
|
||||||
|
@ -647,7 +764,13 @@ class Media(models.Model):
|
||||||
_('skip'),
|
_('skip'),
|
||||||
db_index=True,
|
db_index=True,
|
||||||
default=False,
|
default=False,
|
||||||
help_text=_('Media will be skipped and not downloaded')
|
help_text=_('INTERNAL FLAG - Media will be skipped and not downloaded')
|
||||||
|
)
|
||||||
|
manual_skip = models.BooleanField(
|
||||||
|
_('manual_skip'),
|
||||||
|
db_index=True,
|
||||||
|
default=False,
|
||||||
|
help_text=_('Media marked as "skipped", won\' be downloaded')
|
||||||
)
|
)
|
||||||
downloaded = models.BooleanField(
|
downloaded = models.BooleanField(
|
||||||
_('downloaded'),
|
_('downloaded'),
|
||||||
|
@ -667,7 +790,7 @@ class Media(models.Model):
|
||||||
max_length=30,
|
max_length=30,
|
||||||
blank=True,
|
blank=True,
|
||||||
null=True,
|
null=True,
|
||||||
help_text=_('Audio codec of the downloaded media')
|
help_text=_('Video format (resolution) of the downloaded media')
|
||||||
)
|
)
|
||||||
downloaded_height = models.PositiveIntegerField(
|
downloaded_height = models.PositiveIntegerField(
|
||||||
_('downloaded height'),
|
_('downloaded height'),
|
||||||
|
@ -794,6 +917,23 @@ class Media(models.Model):
|
||||||
hdr = ''
|
hdr = ''
|
||||||
# If the download has completed use existing values
|
# If the download has completed use existing values
|
||||||
if self.downloaded:
|
if self.downloaded:
|
||||||
|
# Check if there's any stored meta data at all
|
||||||
|
if (not self.downloaded_video_codec and \
|
||||||
|
not self.downloaded_audio_codec):
|
||||||
|
# Marked as downloaded but no metadata, imported?
|
||||||
|
return {
|
||||||
|
'resolution': resolution,
|
||||||
|
'height': height,
|
||||||
|
'width': width,
|
||||||
|
'vcodec': vcodec,
|
||||||
|
'acodec': acodec,
|
||||||
|
'fps': fps,
|
||||||
|
'hdr': hdr,
|
||||||
|
'format': tuple(fmt),
|
||||||
|
}
|
||||||
|
if self.downloaded_format:
|
||||||
|
resolution = self.downloaded_format.lower()
|
||||||
|
elif self.downloaded_height:
|
||||||
resolution = f'{self.downloaded_height}p'
|
resolution = f'{self.downloaded_height}p'
|
||||||
if self.downloaded_format != 'audio':
|
if self.downloaded_format != 'audio':
|
||||||
vcodec = self.downloaded_video_codec.lower()
|
vcodec = self.downloaded_video_codec.lower()
|
||||||
|
@ -821,7 +961,7 @@ class Media(models.Model):
|
||||||
# Otherwise, calculate from matched format codes
|
# Otherwise, calculate from matched format codes
|
||||||
vformat = None
|
vformat = None
|
||||||
aformat = None
|
aformat = None
|
||||||
if '+' in format_str:
|
if format_str and '+' in format_str:
|
||||||
# Seperate audio and video streams
|
# Seperate audio and video streams
|
||||||
vformat_code, aformat_code = format_str.split('+')
|
vformat_code, aformat_code = format_str.split('+')
|
||||||
vformat = self.get_format_by_code(vformat_code)
|
vformat = self.get_format_by_code(vformat_code)
|
||||||
|
@ -830,7 +970,7 @@ class Media(models.Model):
|
||||||
# Combined stream or audio only
|
# Combined stream or audio only
|
||||||
cformat = self.get_format_by_code(format_str)
|
cformat = self.get_format_by_code(format_str)
|
||||||
aformat = cformat
|
aformat = cformat
|
||||||
if cformat['vcodec']:
|
if cformat and cformat['vcodec']:
|
||||||
# Combined
|
# Combined
|
||||||
vformat = cformat
|
vformat = cformat
|
||||||
if vformat:
|
if vformat:
|
||||||
|
@ -838,6 +978,7 @@ class Media(models.Model):
|
||||||
fmt.append(resolution)
|
fmt.append(resolution)
|
||||||
vcodec = vformat['vcodec'].lower()
|
vcodec = vformat['vcodec'].lower()
|
||||||
fmt.append(vcodec)
|
fmt.append(vcodec)
|
||||||
|
if aformat:
|
||||||
acodec = aformat['acodec'].lower()
|
acodec = aformat['acodec'].lower()
|
||||||
fmt.append(acodec)
|
fmt.append(acodec)
|
||||||
if vformat:
|
if vformat:
|
||||||
|
@ -887,10 +1028,9 @@ class Media(models.Model):
|
||||||
'source': self.source.slugname,
|
'source': self.source.slugname,
|
||||||
'source_full': self.source.name,
|
'source_full': self.source.name,
|
||||||
'title': self.slugtitle,
|
'title': self.slugtitle,
|
||||||
'title_full': self.title,
|
'title_full': clean_filename(self.title),
|
||||||
'key': self.key,
|
'key': self.key,
|
||||||
'format': '-'.join(display_format['format']),
|
'format': '-'.join(display_format['format']),
|
||||||
'playlist_index': self.playlist_index,
|
|
||||||
'playlist_title': self.playlist_title,
|
'playlist_title': self.playlist_title,
|
||||||
'ext': self.source.extension,
|
'ext': self.source.extension,
|
||||||
'resolution': display_format['resolution'],
|
'resolution': display_format['resolution'],
|
||||||
|
@ -900,12 +1040,20 @@ class Media(models.Model):
|
||||||
'acodec': display_format['acodec'],
|
'acodec': display_format['acodec'],
|
||||||
'fps': display_format['fps'],
|
'fps': display_format['fps'],
|
||||||
'hdr': display_format['hdr'],
|
'hdr': display_format['hdr'],
|
||||||
|
'uploader': self.uploader,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def has_metadata(self):
|
||||||
|
return self.metadata is not None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def loaded_metadata(self):
|
def loaded_metadata(self):
|
||||||
try:
|
try:
|
||||||
return json.loads(self.metadata)
|
data = json.loads(self.metadata)
|
||||||
|
if not isinstance(data, dict):
|
||||||
|
return {}
|
||||||
|
return data
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
|
@ -942,7 +1090,10 @@ class Media(models.Model):
|
||||||
@property
|
@property
|
||||||
def upload_date(self):
|
def upload_date(self):
|
||||||
field = self.get_metadata_field('upload_date')
|
field = self.get_metadata_field('upload_date')
|
||||||
|
try:
|
||||||
upload_date_str = self.loaded_metadata.get(field, '').strip()
|
upload_date_str = self.loaded_metadata.get(field, '').strip()
|
||||||
|
except (AttributeError, ValueError) as e:
|
||||||
|
return None
|
||||||
try:
|
try:
|
||||||
return datetime.strptime(upload_date_str, '%Y%m%d')
|
return datetime.strptime(upload_date_str, '%Y%m%d')
|
||||||
except (AttributeError, ValueError) as e:
|
except (AttributeError, ValueError) as e:
|
||||||
|
@ -951,7 +1102,12 @@ class Media(models.Model):
|
||||||
@property
|
@property
|
||||||
def duration(self):
|
def duration(self):
|
||||||
field = self.get_metadata_field('duration')
|
field = self.get_metadata_field('duration')
|
||||||
return int(self.loaded_metadata.get(field, 0))
|
duration = self.loaded_metadata.get(field, 0)
|
||||||
|
try:
|
||||||
|
duration = int(duration)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
duration = 0
|
||||||
|
return duration
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def duration_formatted(self):
|
def duration_formatted(self):
|
||||||
|
@ -974,8 +1130,12 @@ class Media(models.Model):
|
||||||
def votes(self):
|
def votes(self):
|
||||||
field = self.get_metadata_field('upvotes')
|
field = self.get_metadata_field('upvotes')
|
||||||
upvotes = self.loaded_metadata.get(field, 0)
|
upvotes = self.loaded_metadata.get(field, 0)
|
||||||
|
if not isinstance(upvotes, int):
|
||||||
|
upvotes = 0
|
||||||
field = self.get_metadata_field('downvotes')
|
field = self.get_metadata_field('downvotes')
|
||||||
downvotes = self.loaded_metadata.get(field, 0)
|
downvotes = self.loaded_metadata.get(field, 0)
|
||||||
|
if not isinstance(downvotes, int):
|
||||||
|
downvotes = 0
|
||||||
return upvotes + downvotes
|
return upvotes + downvotes
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
@ -993,11 +1153,6 @@ class Media(models.Model):
|
||||||
field = self.get_metadata_field('formats')
|
field = self.get_metadata_field('formats')
|
||||||
return self.loaded_metadata.get(field, [])
|
return self.loaded_metadata.get(field, [])
|
||||||
|
|
||||||
@property
|
|
||||||
def playlist_index(self):
|
|
||||||
field = self.get_metadata_field('playlist_index')
|
|
||||||
return self.loaded_metadata.get(field, 0)
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def playlist_title(self):
|
def playlist_title(self):
|
||||||
field = self.get_metadata_field('playlist_title')
|
field = self.get_metadata_field('playlist_title')
|
||||||
|
@ -1005,13 +1160,16 @@ class Media(models.Model):
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def filename(self):
|
def filename(self):
|
||||||
# Otherwise, create a suitable filename from the source media_format
|
# Create a suitable filename from the source media_format
|
||||||
media_format = str(self.source.media_format)
|
media_format = str(self.source.media_format)
|
||||||
media_details = self.format_dict
|
media_details = self.format_dict
|
||||||
return media_format.format(**media_details)
|
return media_format.format(**media_details)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def thumbname(self):
|
def thumbname(self):
|
||||||
|
if self.downloaded and self.media_file:
|
||||||
|
filename = os.path.basename(self.media_file.path)
|
||||||
|
else:
|
||||||
filename = self.filename
|
filename = self.filename
|
||||||
prefix, ext = os.path.splitext(filename)
|
prefix, ext = os.path.splitext(filename)
|
||||||
return f'{prefix}.jpg'
|
return f'{prefix}.jpg'
|
||||||
|
@ -1022,6 +1180,9 @@ class Media(models.Model):
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def nfoname(self):
|
def nfoname(self):
|
||||||
|
if self.downloaded and self.media_file:
|
||||||
|
filename = os.path.basename(self.media_file.path)
|
||||||
|
else:
|
||||||
filename = self.filename
|
filename = self.filename
|
||||||
prefix, ext = os.path.splitext(filename)
|
prefix, ext = os.path.splitext(filename)
|
||||||
return f'{prefix}.nfo'
|
return f'{prefix}.nfo'
|
||||||
|
@ -1030,6 +1191,19 @@ class Media(models.Model):
|
||||||
def nfopath(self):
|
def nfopath(self):
|
||||||
return self.source.directory_path / self.nfoname
|
return self.source.directory_path / self.nfoname
|
||||||
|
|
||||||
|
@property
|
||||||
|
def jsonname(self):
|
||||||
|
if self.downloaded and self.media_file:
|
||||||
|
filename = os.path.basename(self.media_file.path)
|
||||||
|
else:
|
||||||
|
filename = self.filename
|
||||||
|
prefix, ext = os.path.splitext(filename)
|
||||||
|
return f'{prefix}.info.json'
|
||||||
|
|
||||||
|
@property
|
||||||
|
def jsonpath(self):
|
||||||
|
return self.source.directory_path / self.jsonname
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def directory_path(self):
|
def directory_path(self):
|
||||||
# Otherwise, create a suitable filename from the source media_format
|
# Otherwise, create a suitable filename from the source media_format
|
||||||
|
@ -1054,6 +1228,29 @@ class Media(models.Model):
|
||||||
return False
|
return False
|
||||||
return os.path.exists(self.media_file.path)
|
return os.path.exists(self.media_file.path)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def content_type(self):
|
||||||
|
if not self.downloaded:
|
||||||
|
return 'video/mp4'
|
||||||
|
vcodec = self.downloaded_video_codec
|
||||||
|
if vcodec is None:
|
||||||
|
acodec = self.downloaded_audio_codec
|
||||||
|
if acodec is None:
|
||||||
|
raise TypeError() # nothing here.
|
||||||
|
acodec = acodec.lower()
|
||||||
|
if acodec == "mp4a":
|
||||||
|
return "audio/mp4"
|
||||||
|
elif acodec == "opus":
|
||||||
|
return "audio/opus"
|
||||||
|
else:
|
||||||
|
# fall-fall-back.
|
||||||
|
return 'audio/ogg'
|
||||||
|
vcodec = vcodec.lower()
|
||||||
|
if vcodec == 'vp9':
|
||||||
|
return 'video/webm'
|
||||||
|
else:
|
||||||
|
return 'video/mp4'
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def nfoxml(self):
|
def nfoxml(self):
|
||||||
'''
|
'''
|
||||||
|
@ -1071,6 +1268,22 @@ class Media(models.Model):
|
||||||
showtitle.text = str(self.source.name).strip()
|
showtitle.text = str(self.source.name).strip()
|
||||||
showtitle.tail = '\n '
|
showtitle.tail = '\n '
|
||||||
nfo.append(showtitle)
|
nfo.append(showtitle)
|
||||||
|
# season = upload date year
|
||||||
|
season = nfo.makeelement('season', {})
|
||||||
|
if self.source.source_type == Source.SOURCE_TYPE_YOUTUBE_PLAYLIST:
|
||||||
|
# If it's a playlist, set season to 1
|
||||||
|
season.text = '1'
|
||||||
|
else:
|
||||||
|
# If it's not a playlist, set season to upload date year
|
||||||
|
season.text = str(self.upload_date.year) if self.upload_date else ''
|
||||||
|
season.tail = '\n '
|
||||||
|
nfo.append(season)
|
||||||
|
# episode = number of video in the year
|
||||||
|
episode = nfo.makeelement('episode', {})
|
||||||
|
episode_number = self.calculate_episode_number()
|
||||||
|
episode.text = str(episode_number) if episode_number else ''
|
||||||
|
episode.tail = '\n '
|
||||||
|
nfo.append(episode)
|
||||||
# ratings = media metadata youtube rating
|
# ratings = media metadata youtube rating
|
||||||
value = nfo.makeelement('value', {})
|
value = nfo.makeelement('value', {})
|
||||||
value.text = str(self.rating)
|
value.text = str(self.rating)
|
||||||
|
@ -1161,6 +1374,10 @@ class Media(models.Model):
|
||||||
return self.STATE_ERROR
|
return self.STATE_ERROR
|
||||||
else:
|
else:
|
||||||
return self.STATE_SCHEDULED
|
return self.STATE_SCHEDULED
|
||||||
|
if self.skip:
|
||||||
|
return self.STATE_SKIPPED
|
||||||
|
if not self.source.download_media:
|
||||||
|
return self.STATE_DISABLED_AT_SOURCE
|
||||||
return self.STATE_UNKNOWN
|
return self.STATE_UNKNOWN
|
||||||
|
|
||||||
def get_download_state_icon(self, task=None):
|
def get_download_state_icon(self, task=None):
|
||||||
|
@ -1174,10 +1391,36 @@ class Media(models.Model):
|
||||||
f'no valid format available')
|
f'no valid format available')
|
||||||
# Download the media with youtube-dl
|
# Download the media with youtube-dl
|
||||||
download_youtube_media(self.url, format_str, self.source.extension,
|
download_youtube_media(self.url, format_str, self.source.extension,
|
||||||
str(self.filepath))
|
str(self.filepath), self.source.write_json,
|
||||||
|
self.source.sponsorblock_categories.selected_choices, self.source.embed_thumbnail,
|
||||||
|
self.source.embed_metadata, self.source.enable_sponsorblock,
|
||||||
|
self.source.write_subtitles, self.source.auto_subtitles,self.source.sub_langs )
|
||||||
# Return the download paramaters
|
# Return the download paramaters
|
||||||
return format_str, self.source.extension
|
return format_str, self.source.extension
|
||||||
|
|
||||||
|
def index_metadata(self):
|
||||||
|
'''
|
||||||
|
Index the media metadata returning a dict of info.
|
||||||
|
'''
|
||||||
|
indexer = self.INDEXERS.get(self.source.source_type, None)
|
||||||
|
if not callable(indexer):
|
||||||
|
raise Exception(f'Media with source type f"{self.source.source_type}" '
|
||||||
|
f'has no indexer')
|
||||||
|
return indexer(self.url)
|
||||||
|
|
||||||
|
def calculate_episode_number(self):
|
||||||
|
if self.source.source_type == Source.SOURCE_TYPE_YOUTUBE_PLAYLIST:
|
||||||
|
sorted_media = Media.objects.filter(source=self.source)
|
||||||
|
else:
|
||||||
|
self_year = self.upload_date.year if self.upload_date else self.created.year
|
||||||
|
filtered_media = Media.objects.filter(source=self.source, published__year=self_year)
|
||||||
|
sorted_media = sorted(filtered_media, key=lambda x: (x.upload_date, x.key))
|
||||||
|
position_counter = 1
|
||||||
|
for media in sorted_media:
|
||||||
|
if media == self:
|
||||||
|
return position_counter
|
||||||
|
position_counter += 1
|
||||||
|
|
||||||
|
|
||||||
class MediaServer(models.Model):
|
class MediaServer(models.Model):
|
||||||
'''
|
'''
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
import os
|
import os
|
||||||
|
import glob
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.db.models.signals import pre_save, post_save, pre_delete, post_delete
|
from django.db.models.signals import pre_save, post_save, pre_delete, post_delete
|
||||||
from django.dispatch import receiver
|
from django.dispatch import receiver
|
||||||
|
@ -8,8 +9,9 @@ from background_task.models import Task
|
||||||
from common.logger import log
|
from common.logger import log
|
||||||
from .models import Source, Media, MediaServer
|
from .models import Source, Media, MediaServer
|
||||||
from .tasks import (delete_task_by_source, delete_task_by_media, index_source_task,
|
from .tasks import (delete_task_by_source, delete_task_by_media, index_source_task,
|
||||||
download_media_thumbnail, map_task_to_instance,
|
download_media_thumbnail, download_media_metadata,
|
||||||
check_source_directory_exists, download_media, rescan_media_server)
|
map_task_to_instance, check_source_directory_exists,
|
||||||
|
download_media, rescan_media_server)
|
||||||
from .utils import delete_file
|
from .utils import delete_file
|
||||||
|
|
||||||
|
|
||||||
|
@ -46,6 +48,7 @@ def source_post_save(sender, instance, created, **kwargs):
|
||||||
priority=0,
|
priority=0,
|
||||||
verbose_name=verbose_name.format(instance.name)
|
verbose_name=verbose_name.format(instance.name)
|
||||||
)
|
)
|
||||||
|
if instance.index_schedule > 0:
|
||||||
delete_task_by_source('sync.tasks.index_source_task', instance.pk)
|
delete_task_by_source('sync.tasks.index_source_task', instance.pk)
|
||||||
log.info(f'Scheduling media indexing for source: {instance.name}')
|
log.info(f'Scheduling media indexing for source: {instance.name}')
|
||||||
verbose_name = _('Index media from source "{}"')
|
verbose_name = _('Index media from source "{}"')
|
||||||
|
@ -72,6 +75,7 @@ def source_pre_delete(sender, instance, **kwargs):
|
||||||
media.delete()
|
media.delete()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@receiver(post_delete, sender=Source)
|
@receiver(post_delete, sender=Source)
|
||||||
def source_post_delete(sender, instance, **kwargs):
|
def source_post_delete(sender, instance, **kwargs):
|
||||||
# Triggered after a source is deleted
|
# Triggered after a source is deleted
|
||||||
|
@ -91,18 +95,92 @@ def task_task_failed(sender, task_id, completed_task, **kwargs):
|
||||||
|
|
||||||
@receiver(post_save, sender=Media)
|
@receiver(post_save, sender=Media)
|
||||||
def media_post_save(sender, instance, created, **kwargs):
|
def media_post_save(sender, instance, created, **kwargs):
|
||||||
# Triggered after media is saved, Recalculate the "can_download" flag, this may
|
# If the media is skipped manually, bail.
|
||||||
|
if instance.manual_skip:
|
||||||
|
return
|
||||||
|
# Triggered after media is saved
|
||||||
|
cap_changed = False
|
||||||
|
can_download_changed = False
|
||||||
|
# Reset the skip flag if the download cap has changed if the media has not
|
||||||
|
# already been downloaded
|
||||||
|
if not instance.downloaded and instance.metadata:
|
||||||
|
max_cap_age = instance.source.download_cap_date
|
||||||
|
filter_text = instance.source.filter_text.strip()
|
||||||
|
published = instance.published
|
||||||
|
if not published:
|
||||||
|
if not instance.skip:
|
||||||
|
log.warn(f'Media: {instance.source} / {instance} has no published date '
|
||||||
|
f'set, marking to be skipped')
|
||||||
|
instance.skip = True
|
||||||
|
cap_changed = True
|
||||||
|
else:
|
||||||
|
log.debug(f'Media: {instance.source} / {instance} has no published date '
|
||||||
|
f'set but is already marked to be skipped')
|
||||||
|
else:
|
||||||
|
if max_cap_age:
|
||||||
|
if published > max_cap_age and instance.skip:
|
||||||
|
if filter_text:
|
||||||
|
if instance.source.is_regex_match(instance.title):
|
||||||
|
log.info(f'Media: {instance.source} / {instance} has a valid '
|
||||||
|
f'publishing date and title filter, marking to be unskipped')
|
||||||
|
instance.skip = False
|
||||||
|
cap_changed = True
|
||||||
|
else:
|
||||||
|
log.debug(f'Media: {instance.source} / {instance} has a valid publishing date '
|
||||||
|
f'but failed the title filter match, already marked skipped')
|
||||||
|
else:
|
||||||
|
log.info(f'Media: {instance.source} / {instance} has a valid '
|
||||||
|
f'publishing date, marking to be unskipped')
|
||||||
|
instance.skip = False
|
||||||
|
cap_changed = True
|
||||||
|
elif published <= max_cap_age and not instance.skip:
|
||||||
|
log.info(f'Media: {instance.source} / {instance} is too old for '
|
||||||
|
f'the download cap date, marking to be skipped')
|
||||||
|
instance.skip = True
|
||||||
|
cap_changed = True
|
||||||
|
else:
|
||||||
|
if instance.skip:
|
||||||
|
# Media marked to be skipped but source download cap removed
|
||||||
|
if filter_text:
|
||||||
|
if instance.source.is_regex_match(instance.title):
|
||||||
|
log.info(f'Media: {instance.source} / {instance} has a valid '
|
||||||
|
f'publishing date and title filter, marking to be unskipped')
|
||||||
|
instance.skip = False
|
||||||
|
cap_changed = True
|
||||||
|
else:
|
||||||
|
log.info(f'Media: {instance.source} / {instance} has a valid publishing date '
|
||||||
|
f'but failed the title filter match, already marked skipped')
|
||||||
|
else:
|
||||||
|
log.debug(f'Media: {instance.source} / {instance} has a valid publishing date and '
|
||||||
|
f'is already marked as not to be skipped')
|
||||||
|
|
||||||
|
cap_changed = False
|
||||||
|
# Recalculate the "can_download" flag, this may
|
||||||
# need to change if the source specifications have been changed
|
# need to change if the source specifications have been changed
|
||||||
post_save.disconnect(media_post_save, sender=Media)
|
if instance.metadata:
|
||||||
if instance.get_format_str():
|
if instance.get_format_str():
|
||||||
if not instance.can_download:
|
if not instance.can_download:
|
||||||
instance.can_download = True
|
instance.can_download = True
|
||||||
instance.save()
|
can_download_changed = True
|
||||||
else:
|
else:
|
||||||
if instance.can_download:
|
if instance.can_download:
|
||||||
instance.can_download = False
|
instance.can_download = False
|
||||||
|
can_download_changed = True
|
||||||
|
# Save the instance if any changes were required
|
||||||
|
if cap_changed or can_download_changed:
|
||||||
|
post_save.disconnect(media_post_save, sender=Media)
|
||||||
instance.save()
|
instance.save()
|
||||||
post_save.connect(media_post_save, sender=Media)
|
post_save.connect(media_post_save, sender=Media)
|
||||||
|
# If the media is missing metadata schedule it to be downloaded
|
||||||
|
if not instance.metadata:
|
||||||
|
log.info(f'Scheduling task to download metadata for: {instance.url}')
|
||||||
|
verbose_name = _('Downloading metadata for "{}"')
|
||||||
|
download_media_metadata(
|
||||||
|
str(instance.pk),
|
||||||
|
priority=5,
|
||||||
|
verbose_name=verbose_name.format(instance.pk),
|
||||||
|
remove_existing_tasks=True
|
||||||
|
)
|
||||||
# If the media is missing a thumbnail schedule it to be downloaded
|
# If the media is missing a thumbnail schedule it to be downloaded
|
||||||
if not instance.thumb_file_exists:
|
if not instance.thumb_file_exists:
|
||||||
instance.thumb = None
|
instance.thumb = None
|
||||||
|
@ -124,7 +202,8 @@ def media_post_save(sender, instance, created, **kwargs):
|
||||||
if not instance.media_file_exists:
|
if not instance.media_file_exists:
|
||||||
instance.downloaded = False
|
instance.downloaded = False
|
||||||
instance.media_file = None
|
instance.media_file = None
|
||||||
if not instance.downloaded and instance.can_download and not instance.skip:
|
if (not instance.downloaded and instance.can_download and not instance.skip
|
||||||
|
and instance.source.download_media):
|
||||||
delete_task_by_media('sync.tasks.download_media', (str(instance.pk),))
|
delete_task_by_media('sync.tasks.download_media', (str(instance.pk),))
|
||||||
verbose_name = _('Downloading media for "{}"')
|
verbose_name = _('Downloading media for "{}"')
|
||||||
download_media(
|
download_media(
|
||||||
|
@ -145,6 +224,16 @@ def media_pre_delete(sender, instance, **kwargs):
|
||||||
if thumbnail_url:
|
if thumbnail_url:
|
||||||
delete_task_by_media('sync.tasks.download_media_thumbnail',
|
delete_task_by_media('sync.tasks.download_media_thumbnail',
|
||||||
(str(instance.pk), thumbnail_url))
|
(str(instance.pk), thumbnail_url))
|
||||||
|
if instance.source.delete_files_on_disk and (instance.media_file or instance.thumb):
|
||||||
|
# Delete all media files if it contains filename
|
||||||
|
filepath = instance.media_file.path if instance.media_file else instance.thumb.path
|
||||||
|
barefilepath, fileext = os.path.splitext(filepath)
|
||||||
|
# Get all files that start with the bare file path
|
||||||
|
all_related_files = glob.glob(f'{barefilepath}.*')
|
||||||
|
for file in all_related_files:
|
||||||
|
log.info(f'Deleting file for: {instance} path: {file}')
|
||||||
|
delete_file(file)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@receiver(post_delete, sender=Media)
|
@receiver(post_delete, sender=Media)
|
||||||
|
|
|
@ -10,7 +10,7 @@ import math
|
||||||
import uuid
|
import uuid
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
from hashlib import sha1
|
from hashlib import sha1
|
||||||
from datetime import timedelta
|
from datetime import timedelta, datetime
|
||||||
from shutil import copyfile
|
from shutil import copyfile
|
||||||
from PIL import Image
|
from PIL import Image
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
|
@ -22,6 +22,7 @@ from background_task import background
|
||||||
from background_task.models import Task, CompletedTask
|
from background_task.models import Task, CompletedTask
|
||||||
from common.logger import log
|
from common.logger import log
|
||||||
from common.errors import NoMediaException, DownloadFailedException
|
from common.errors import NoMediaException, DownloadFailedException
|
||||||
|
from common.utils import json_serial
|
||||||
from .models import Source, Media, MediaServer
|
from .models import Source, Media, MediaServer
|
||||||
from .utils import (get_remote_image, resize_image_to_height, delete_file,
|
from .utils import (get_remote_image, resize_image_to_height, delete_file,
|
||||||
write_text_file)
|
write_text_file)
|
||||||
|
@ -131,18 +132,25 @@ def cleanup_completed_tasks():
|
||||||
|
|
||||||
|
|
||||||
def cleanup_old_media():
|
def cleanup_old_media():
|
||||||
for media in Media.objects.filter(download_date__isnull=False):
|
for source in Source.objects.filter(delete_old_media=True, days_to_keep__gt=0):
|
||||||
if media.source.delete_old_media and media.source.days_to_keep > 0:
|
delta = timezone.now() - timedelta(days=source.days_to_keep)
|
||||||
delta = timezone.now() - timedelta(days=media.source.days_to_keep)
|
for media in source.media_source.filter(downloaded=True, download_date__lt=delta):
|
||||||
if media.downloaded and media.download_date < delta:
|
log.info(f'Deleting expired media: {source} / {media} '
|
||||||
# Media was downloaded after the cutoff date, delete it
|
f'(now older than {source.days_to_keep} days / '
|
||||||
log.info(f'Deleting expired media: {media.source} / {media} '
|
|
||||||
f'(now older than {media.source.days_to_keep} days / '
|
|
||||||
f'download_date before {delta})')
|
f'download_date before {delta})')
|
||||||
# .delete() also triggers a pre_delete signal that removes the files
|
# .delete() also triggers a pre_delete signal that removes the files
|
||||||
media.delete()
|
media.delete()
|
||||||
|
|
||||||
|
|
||||||
|
def cleanup_removed_media(source, videos):
|
||||||
|
media_objects = Media.objects.filter(source=source, downloaded=True)
|
||||||
|
for item in media_objects:
|
||||||
|
matching_source_item = [video['id'] for video in videos if video['id'] == item.key]
|
||||||
|
if not matching_source_item:
|
||||||
|
log.info(f'{item.title} is no longer in source, removing')
|
||||||
|
item.delete()
|
||||||
|
|
||||||
|
|
||||||
@background(schedule=0)
|
@background(schedule=0)
|
||||||
def index_source_task(source_id):
|
def index_source_task(source_id):
|
||||||
'''
|
'''
|
||||||
|
@ -152,7 +160,6 @@ def index_source_task(source_id):
|
||||||
source = Source.objects.get(pk=source_id)
|
source = Source.objects.get(pk=source_id)
|
||||||
except Source.DoesNotExist:
|
except Source.DoesNotExist:
|
||||||
# Task triggered but the Source has been deleted, delete the task
|
# Task triggered but the Source has been deleted, delete the task
|
||||||
delete_index_source_task(source_id)
|
|
||||||
return
|
return
|
||||||
# Reset any errors
|
# Reset any errors
|
||||||
source.has_failed = False
|
source.has_failed = False
|
||||||
|
@ -175,34 +182,10 @@ def index_source_task(source_id):
|
||||||
# Video has no unique key (ID), it can't be indexed
|
# Video has no unique key (ID), it can't be indexed
|
||||||
continue
|
continue
|
||||||
try:
|
try:
|
||||||
media = Media.objects.get(key=key)
|
media = Media.objects.get(key=key, source=source)
|
||||||
except Media.DoesNotExist:
|
except Media.DoesNotExist:
|
||||||
media = Media(key=key)
|
media = Media(key=key)
|
||||||
media.source = source
|
media.source = source
|
||||||
media.metadata = json.dumps(video)
|
|
||||||
upload_date = media.upload_date
|
|
||||||
# Media must have a valid upload date
|
|
||||||
if upload_date:
|
|
||||||
media.published = timezone.make_aware(upload_date)
|
|
||||||
else:
|
|
||||||
log.error(f'Media has no upload date, skipping: {source} / {media}')
|
|
||||||
continue
|
|
||||||
# If the source has a download cap date check the upload date is allowed
|
|
||||||
max_cap_age = source.download_cap_date
|
|
||||||
if max_cap_age:
|
|
||||||
if media.published < max_cap_age:
|
|
||||||
# Media was published after the cap date, skip it
|
|
||||||
log.warn(f'Media: {source} / {media} is older than cap age '
|
|
||||||
f'{max_cap_age}, skipping')
|
|
||||||
continue
|
|
||||||
# If the source has a cut-off check the upload date is within the allowed delta
|
|
||||||
if source.delete_old_media and source.days_to_keep > 0:
|
|
||||||
delta = timezone.now() - timedelta(days=source.days_to_keep)
|
|
||||||
if media.published < delta:
|
|
||||||
# Media was published after the cutoff date, skip it
|
|
||||||
log.warn(f'Media: {source} / {media} is older than '
|
|
||||||
f'{source.days_to_keep} days, skipping')
|
|
||||||
continue
|
|
||||||
try:
|
try:
|
||||||
media.save()
|
media.save()
|
||||||
log.info(f'Indexed media: {source} / {media}')
|
log.info(f'Indexed media: {source} / {media}')
|
||||||
|
@ -212,6 +195,9 @@ def index_source_task(source_id):
|
||||||
cleanup_completed_tasks()
|
cleanup_completed_tasks()
|
||||||
# Tack on a cleanup of old media
|
# Tack on a cleanup of old media
|
||||||
cleanup_old_media()
|
cleanup_old_media()
|
||||||
|
if source.delete_removed_media:
|
||||||
|
log.info(f'Cleaning up media no longer in source {source}')
|
||||||
|
cleanup_removed_media(source, videos)
|
||||||
|
|
||||||
|
|
||||||
@background(schedule=0)
|
@background(schedule=0)
|
||||||
|
@ -225,7 +211,6 @@ def check_source_directory_exists(source_id):
|
||||||
source = Source.objects.get(pk=source_id)
|
source = Source.objects.get(pk=source_id)
|
||||||
except Source.DoesNotExist:
|
except Source.DoesNotExist:
|
||||||
# Task triggered but the Source has been deleted, delete the task
|
# Task triggered but the Source has been deleted, delete the task
|
||||||
delete_index_source_task(source_id)
|
|
||||||
return
|
return
|
||||||
# Check the source output directory exists
|
# Check the source output directory exists
|
||||||
if not source.directory_exists():
|
if not source.directory_exists():
|
||||||
|
@ -234,6 +219,69 @@ def check_source_directory_exists(source_id):
|
||||||
source.make_directory()
|
source.make_directory()
|
||||||
|
|
||||||
|
|
||||||
|
@background(schedule=0)
|
||||||
|
def download_media_metadata(media_id):
|
||||||
|
'''
|
||||||
|
Downloads the metadata for a media item.
|
||||||
|
'''
|
||||||
|
try:
|
||||||
|
media = Media.objects.get(pk=media_id)
|
||||||
|
except Media.DoesNotExist:
|
||||||
|
# Task triggered but the media no longer exists, do nothing
|
||||||
|
log.error(f'Task download_media_metadata(pk={media_id}) called but no '
|
||||||
|
f'media exists with ID: {media_id}')
|
||||||
|
return
|
||||||
|
if media.manual_skip:
|
||||||
|
log.info(f'Task for ID: {media_id} skipped, due to task being manually skipped.')
|
||||||
|
return
|
||||||
|
source = media.source
|
||||||
|
metadata = media.index_metadata()
|
||||||
|
media.metadata = json.dumps(metadata, default=json_serial)
|
||||||
|
upload_date = media.upload_date
|
||||||
|
# Media must have a valid upload date
|
||||||
|
if upload_date:
|
||||||
|
media.published = timezone.make_aware(upload_date)
|
||||||
|
else:
|
||||||
|
log.error(f'Media has no upload date, skipping: {source} / {media}')
|
||||||
|
media.skip = True
|
||||||
|
# If the source has a download cap date check the upload date is allowed
|
||||||
|
max_cap_age = source.download_cap_date
|
||||||
|
if media.published and max_cap_age:
|
||||||
|
if media.published < max_cap_age:
|
||||||
|
# Media was published after the cap date, skip it
|
||||||
|
log.warn(f'Media: {source} / {media} is older than cap age '
|
||||||
|
f'{max_cap_age}, skipping')
|
||||||
|
media.skip = True
|
||||||
|
# If the source has a search filter, check the video title matches the filter
|
||||||
|
if source.filter_text and not source.is_regex_match(media.title):
|
||||||
|
# Filter text not found in the media title. Accepts regex string, blank search filter results in this returning false
|
||||||
|
log.warn(f'Media: {source} / {media} does not match {source.filter_text}, skipping')
|
||||||
|
media.skip = True
|
||||||
|
# If the source has a cut-off check the upload date is within the allowed delta
|
||||||
|
if source.delete_old_media and source.days_to_keep > 0:
|
||||||
|
if not isinstance(media.published, datetime):
|
||||||
|
# Media has no known published date or incomplete metadata
|
||||||
|
log.warn(f'Media: {source} / {media} has no published date, skipping')
|
||||||
|
media.skip = True
|
||||||
|
else:
|
||||||
|
delta = timezone.now() - timedelta(days=source.days_to_keep)
|
||||||
|
if media.published < delta:
|
||||||
|
# Media was published after the cutoff date, skip it
|
||||||
|
log.warn(f'Media: {source} / {media} is older than '
|
||||||
|
f'{source.days_to_keep} days, skipping')
|
||||||
|
media.skip = True
|
||||||
|
# Check we can download the media item
|
||||||
|
if not media.skip:
|
||||||
|
if media.get_format_str():
|
||||||
|
media.can_download = True
|
||||||
|
else:
|
||||||
|
media.can_download = False
|
||||||
|
# Save the media
|
||||||
|
media.save()
|
||||||
|
log.info(f'Saved {len(media.metadata)} bytes of metadata for: '
|
||||||
|
f'{source} / {media_id}')
|
||||||
|
|
||||||
|
|
||||||
@background(schedule=0)
|
@background(schedule=0)
|
||||||
def download_media_thumbnail(media_id, url):
|
def download_media_thumbnail(media_id, url):
|
||||||
'''
|
'''
|
||||||
|
@ -279,8 +327,27 @@ def download_media(media_id):
|
||||||
return
|
return
|
||||||
if media.skip:
|
if media.skip:
|
||||||
# Media was toggled to be skipped after the task was scheduled
|
# Media was toggled to be skipped after the task was scheduled
|
||||||
log.warn(f'Download task triggeredd media: {media} (UUID: {media.pk}) but it '
|
log.warn(f'Download task triggered for media: {media} (UUID: {media.pk}) but '
|
||||||
f'is now marked to be skipped, not downloading')
|
f'it is now marked to be skipped, not downloading')
|
||||||
|
return
|
||||||
|
if media.downloaded and media.media_file:
|
||||||
|
# Media has been marked as downloaded before the download_media task was fired,
|
||||||
|
# skip it
|
||||||
|
log.warn(f'Download task triggered for media: {media} (UUID: {media.pk}) but '
|
||||||
|
f'it has already been marked as downloaded, not downloading again')
|
||||||
|
return
|
||||||
|
if not media.source.download_media:
|
||||||
|
log.warn(f'Download task triggered for media: {media} (UUID: {media.pk}) but '
|
||||||
|
f'the source {media.source} has since been marked to not download, '
|
||||||
|
f'not downloading')
|
||||||
|
return
|
||||||
|
max_cap_age = media.source.download_cap_date
|
||||||
|
published = media.published
|
||||||
|
if max_cap_age and published:
|
||||||
|
if published <= max_cap_age:
|
||||||
|
log.warn(f'Download task triggered media: {media} (UUID: {media.pk}) but '
|
||||||
|
f'the source has a download cap and the media is now too old, '
|
||||||
|
f'not downloading')
|
||||||
return
|
return
|
||||||
filepath = media.filepath
|
filepath = media.filepath
|
||||||
log.info(f'Downloading media: {media} (UUID: {media.pk}) to: "{filepath}"')
|
log.info(f'Downloading media: {media} (UUID: {media.pk}) to: "{filepath}"')
|
||||||
|
@ -290,7 +357,7 @@ def download_media(media_id):
|
||||||
log.info(f'Successfully downloaded media: {media} (UUID: {media.pk}) to: '
|
log.info(f'Successfully downloaded media: {media} (UUID: {media.pk}) to: '
|
||||||
f'"{filepath}"')
|
f'"{filepath}"')
|
||||||
# Link the media file to the object and update info about the download
|
# Link the media file to the object and update info about the download
|
||||||
media.media_file.name = str(filepath)
|
media.media_file.name = str(media.source.type_directory_path / media.filename)
|
||||||
media.downloaded = True
|
media.downloaded = True
|
||||||
media.download_date = timezone.now()
|
media.download_date = timezone.now()
|
||||||
media.downloaded_filesize = os.path.getsize(filepath)
|
media.downloaded_filesize = os.path.getsize(filepath)
|
||||||
|
|
|
@ -25,12 +25,12 @@
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td>{mm}</td>
|
<td>{mm}</td>
|
||||||
<td>Media publish year in MM</td>
|
<td>Media publish month in MM</td>
|
||||||
<td>01</td>
|
<td>01</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td>{dd}</td>
|
<td>{dd}</td>
|
||||||
<td>Media publish year in DD</td>
|
<td>Media publish day in DD</td>
|
||||||
<td>31</td>
|
<td>31</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
|
@ -43,6 +43,11 @@
|
||||||
<td>Full source name</td>
|
<td>Full source name</td>
|
||||||
<td>My Source</td>
|
<td>My Source</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>{uploader}</td>
|
||||||
|
<td>Uploader name</td>
|
||||||
|
<td>Some Channel Name</td>
|
||||||
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td>{title}</td>
|
<td>{title}</td>
|
||||||
<td>Lower case media title, max 80 chars</td>
|
<td>Lower case media title, max 80 chars</td>
|
||||||
|
@ -63,11 +68,6 @@
|
||||||
<td>Media format string</td>
|
<td>Media format string</td>
|
||||||
<td>720p-avc1-mp4a</td>
|
<td>720p-avc1-mp4a</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
|
||||||
<td>{playlist_index}</td>
|
|
||||||
<td>Playlist index of media, if it's in a playlist</td>
|
|
||||||
<td>12</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
<tr>
|
||||||
<td>{playlist_title}</td>
|
<td>{playlist_title}</td>
|
||||||
<td>Playlist title of media, if it's in a playlist</td>
|
<td>Playlist title of media, if it's in a playlist</td>
|
||||||
|
|
|
@ -101,7 +101,7 @@
|
||||||
</div>
|
</div>
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<div class="col s12">
|
<div class="col s12">
|
||||||
<h2 class="truncate">Runtime infomation</h2>
|
<h2 class="truncate">Runtime information</h2>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div class="row">
|
<div class="row">
|
||||||
|
@ -123,6 +123,10 @@
|
||||||
<td class="hide-on-small-only">Downloads directory</td>
|
<td class="hide-on-small-only">Downloads directory</td>
|
||||||
<td><span class="hide-on-med-and-up">Downloads directory<br></span><strong>{{ downloads_dir }}</strong></td>
|
<td><span class="hide-on-med-and-up">Downloads directory<br></span><strong>{{ downloads_dir }}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
|
<tr title="Database connection used by TubeSync">
|
||||||
|
<td class="hide-on-small-only">Database</td>
|
||||||
|
<td><span class="hide-on-med-and-up">Database<br></span><strong>{{ database_connection }}</strong></td>
|
||||||
|
</tr>
|
||||||
</table>
|
</table>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
|
@ -9,10 +9,24 @@
|
||||||
{% if media.title %}<h2 class="truncate"><strong>{{ media.title }}</strong></h2>{% endif %}
|
{% if media.title %}<h2 class="truncate"><strong>{{ media.title }}</strong></h2>{% endif %}
|
||||||
<p class="truncate"><strong><a href="{{ media.url }}" target="_blank"><i class="fas fa-link"></i> {{ media.url }}</a></strong></p>
|
<p class="truncate"><strong><a href="{{ media.url }}" target="_blank"><i class="fas fa-link"></i> {{ media.url }}</a></strong></p>
|
||||||
<p class="truncate">Downloading to: <strong>{{ media.source.directory_path }}</strong></p>
|
<p class="truncate">Downloading to: <strong>{{ media.source.directory_path }}</strong></p>
|
||||||
|
{% if download_state == 'downloaded' %}
|
||||||
|
{% if media.source.is_audio %}
|
||||||
|
<audio controls src="{% url 'sync:media-content' pk=media.pk %}"></audio>
|
||||||
|
{% else %}
|
||||||
|
<video controls style="width: 100%">
|
||||||
|
<source src="{% url 'sync:media-content' pk=media.pk %}">
|
||||||
|
</video>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<p class="truncate"><a href="{% url 'sync:media-content' pk=media.pk %}" download="{{ media.filename }}"><strong><i class="fas fa-download"></i> Download</strong></a></p>
|
||||||
|
{% endif %}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
{% if media.manual_skip %}{% include 'errorbox.html' with message='Media is marked to be skipped and will not be downloaded.' %}
|
||||||
|
{% else %}
|
||||||
{% if not media.can_download %}{% include 'errorbox.html' with message='Media cannot be downloaded because it has no formats which match the source requirements.' %}{% endif %}
|
{% if not media.can_download %}{% include 'errorbox.html' with message='Media cannot be downloaded because it has no formats which match the source requirements.' %}{% endif %}
|
||||||
{% if media.skip %}{% include 'errorbox.html' with message='Media is marked to be skipped and will not be downloaded.' %}{% endif %}
|
{% if media.skip %}{% include 'errorbox.html' with message='This media may be skipped due to error(s).' %}{% endif %}
|
||||||
|
{% endif %}
|
||||||
{% include 'infobox.html' with message=message %}
|
{% include 'infobox.html' with message=message %}
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<div class="col s12 m7">
|
<div class="col s12 m7">
|
||||||
|
@ -64,8 +78,14 @@
|
||||||
<td class="hide-on-small-only">Fallback</td>
|
<td class="hide-on-small-only">Fallback</td>
|
||||||
<td><span class="hide-on-med-and-up">Fallback<br></span><strong>{{ media.source.get_fallback_display }}</strong></td>
|
<td><span class="hide-on-med-and-up">Fallback<br></span><strong>{{ media.source.get_fallback_display }}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
|
{% if not media.source.download_media %}
|
||||||
|
<tr title="Is media marked to be downloaded at the source?">
|
||||||
|
<td class="hide-on-small-only">Source download?</td>
|
||||||
|
<td><span class="hide-on-med-and-up">Source download?<br></span><strong>{% if media.source.download_media %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
|
</tr>
|
||||||
|
{% endif %}
|
||||||
{% if media.skip %}
|
{% if media.skip %}
|
||||||
<tr title="Has the media been downloaded?">
|
<tr title="Is the media marked to be skipped?">
|
||||||
<td class="hide-on-small-only">Skipping?</td>
|
<td class="hide-on-small-only">Skipping?</td>
|
||||||
<td><span class="hide-on-med-and-up">Skipping?<br></span><strong>{% if media.skip %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
<td><span class="hide-on-med-and-up">Skipping?<br></span><strong>{% if media.skip %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
|
@ -109,7 +129,7 @@
|
||||||
{% else %}
|
{% else %}
|
||||||
<tr title="Can the media be downloaded?">
|
<tr title="Can the media be downloaded?">
|
||||||
<td class="hide-on-small-only">Can download?</td>
|
<td class="hide-on-small-only">Can download?</td>
|
||||||
<td><span class="hide-on-med-and-up">Can download?<br></span><strong>{% if youtube_dl_format %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
<td><span class="hide-on-med-and-up">Can download?<br></span><strong>{% if media.can_download %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
<tr title="The available media formats">
|
<tr title="The available media formats">
|
||||||
|
@ -150,10 +170,10 @@
|
||||||
{% else %}
|
{% else %}
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<div class="col s12">
|
<div class="col s12">
|
||||||
{% if media.skip %}
|
{% if media.manual_skip %}
|
||||||
<a href="{% url 'sync:enable-media' pk=media.pk %}" class="btn">Enable (unskip) media <i class="fas fa-cloud-download-alt"></i></a>
|
<a href="{% url 'sync:enable-media' pk=media.pk %}" class="btn">Unskip media (manually) <i class="fas fa-cloud-download-alt"></i></a>
|
||||||
{% else %}
|
{% else %}
|
||||||
<a href="{% url 'sync:skip-media' pk=media.pk %}" class="btn delete-button">Skip media <i class="fas fa-times-circle"></i></a>
|
<a href="{% url 'sync:skip-media' pk=media.pk %}" class="btn delete-button">Manually mark media to be skipped <i class="fas fa-times-circle"></i></a>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
|
@ -4,9 +4,23 @@
|
||||||
|
|
||||||
{% block content %}
|
{% block content %}
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<div class="col s12">
|
<div class="col s12 m6">
|
||||||
<h1 class="truncate">Media</h1>
|
<h1 class="truncate">Media</h1>
|
||||||
</div>
|
</div>
|
||||||
|
<div class="col s12 m3">
|
||||||
|
{% if show_skipped %}
|
||||||
|
<a href="{% url 'sync:media' %}{% if source %}?filter={{ source.pk }}{% endif %}" class="btn"><i class="far fa-eye-slash"></i> Hide skipped media</a>
|
||||||
|
{% else %}
|
||||||
|
<a href="{% url 'sync:media' %}?show_skipped=yes{% if source %}&filter={{ source.pk }}{% endif %}" class="btn"><i class="far fa-eye"></i> Show skipped media</a>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
<div class="col s12 m3">
|
||||||
|
{% if only_skipped %}
|
||||||
|
<a href="{% url 'sync:media' %}{% if source %}?filter={{ source.pk }}{% endif %}" class="btn"><i class="far fa-eye-slash"></i> Only skipped media</a>
|
||||||
|
{% else %}
|
||||||
|
<a href="{% url 'sync:media' %}?only_skipped=yes{% if source %}&filter={{ source.pk }}{% endif %}" class="btn"><i class="far fa-eye"></i> Only skipped media</a>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{% include 'infobox.html' with message=message %}
|
{% include 'infobox.html' with message=message %}
|
||||||
<div class="row no-margin-bottom">
|
<div class="row no-margin-bottom">
|
||||||
|
@ -22,10 +36,16 @@
|
||||||
{% if m.downloaded %}
|
{% if m.downloaded %}
|
||||||
<i class="fas fa-check-circle" title="Downloaded"></i> {{ m.download_date|date:'Y-m-d' }}
|
<i class="fas fa-check-circle" title="Downloaded"></i> {{ m.download_date|date:'Y-m-d' }}
|
||||||
{% else %}
|
{% else %}
|
||||||
{% if m.skip %}
|
{% if m.manual_skip %}
|
||||||
<span class="error-text"><i class="fas fa-times" title="Skipping media"></i> Skipped</span>
|
<span class="error-text"><i class="fas fa-times" title="Skipping media"></i> Manually skipped</span>
|
||||||
|
{% elif m.skip %}
|
||||||
|
<span class="error-text"><i class="fas fa-times" title="Skipping media"></i> Skipped by system</span>
|
||||||
|
{% elif not m.source.download_media %}
|
||||||
|
<span class="error-text"><i class="fas fa-times" title="Not downloading media for this source"></i> Disabled at source</span>
|
||||||
|
{% elif not m.has_metadata %}
|
||||||
|
<i class="far fa-clock" title="Waiting for metadata"></i> Fetching metadata
|
||||||
{% elif m.can_download %}
|
{% elif m.can_download %}
|
||||||
<i class="far fa-clock" title="Waiting to download or downloading"></i> {{ m.published|date:'Y-m-d' }}
|
<i class="far fa-clock" title="Waiting to download or downloading"></i> Downloading
|
||||||
{% else %}
|
{% else %}
|
||||||
<span class="error-text"><i class="fas fa-exclamation-triangle" title="No matching formats to download"></i> No matching formats</span>
|
<span class="error-text"><i class="fas fa-exclamation-triangle" title="No matching formats to download"></i> No matching formats</span>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
@ -44,5 +64,5 @@
|
||||||
</div>
|
</div>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
</div>
|
</div>
|
||||||
{% include 'pagination.html' with pagination=sources.paginator filter=source.pk %}
|
{% include 'pagination.html' with pagination=sources.paginator filter=source.pk show_skipped=show_skipped only_skipped=only_skipped%}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
|
@ -9,8 +9,8 @@
|
||||||
<p>
|
<p>
|
||||||
Are you sure you want to delete this source? Deleting a source is permanent.
|
Are you sure you want to delete this source? Deleting a source is permanent.
|
||||||
By default, deleting a source does not delete any saved media files. You can
|
By default, deleting a source does not delete any saved media files. You can
|
||||||
tick the "also delete downloaded media" checkbox to also remove save
|
<strong>tick the "also delete downloaded media" checkbox to also remove directory {{ source.directory_path }}
|
||||||
media when you delete the source. Deleting a source cannot be undone.
|
</strong>when you delete the source. Deleting a source cannot be undone.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
|
@ -43,6 +43,10 @@
|
||||||
<td class="hide-on-small-only">Directory</td>
|
<td class="hide-on-small-only">Directory</td>
|
||||||
<td><span class="hide-on-med-and-up">Directory<br></span><strong>{{ source.directory }}</strong></td>
|
<td><span class="hide-on-med-and-up">Directory<br></span><strong>{{ source.directory }}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
|
<tr title="Filter text">
|
||||||
|
<td class="hide-on-small-only">Filter text</td>
|
||||||
|
<td><span class="hide-on-med-and-up">Filter text<br></span><strong>{{ source.filter_text }}</strong></td>
|
||||||
|
</tr>
|
||||||
<tr title="Media file name format to use for saving files">
|
<tr title="Media file name format to use for saving files">
|
||||||
<td class="hide-on-small-only">Media format</td>
|
<td class="hide-on-small-only">Media format</td>
|
||||||
<td><span class="hide-on-med-and-up">Media format<br></span><strong>{{ source.media_format }}</strong></td>
|
<td><span class="hide-on-med-and-up">Media format<br></span><strong>{{ source.media_format }}</strong></td>
|
||||||
|
@ -61,6 +65,10 @@
|
||||||
<td class="hide-on-small-only">Index schedule</td>
|
<td class="hide-on-small-only">Index schedule</td>
|
||||||
<td><span class="hide-on-med-and-up">Index schedule<br></span><strong>{{ source.get_index_schedule_display }}</strong></td>
|
<td><span class="hide-on-med-and-up">Index schedule<br></span><strong>{{ source.get_index_schedule_display }}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
|
<tr title="Download media from this source">
|
||||||
|
<td class="hide-on-small-only">Download media?</td>
|
||||||
|
<td><span class="hide-on-med-and-up">Download media?<br></span><strong>{% if source.download_media %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
|
</tr>
|
||||||
<tr title="When then source was created locally in TubeSync">
|
<tr title="When then source was created locally in TubeSync">
|
||||||
<td class="hide-on-small-only">Created</td>
|
<td class="hide-on-small-only">Created</td>
|
||||||
<td><span class="hide-on-med-and-up">Created<br></span><strong>{{ source.created|date:'Y-m-d H:i:s' }}</strong></td>
|
<td><span class="hide-on-med-and-up">Created<br></span><strong>{{ source.created|date:'Y-m-d H:i:s' }}</strong></td>
|
||||||
|
@ -107,6 +115,18 @@
|
||||||
<td class="hide-on-small-only">Write NFO?</td>
|
<td class="hide-on-small-only">Write NFO?</td>
|
||||||
<td><span class="hide-on-med-and-up">Write NFO?<br></span><strong>{% if source.write_nfo %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
<td><span class="hide-on-med-and-up">Write NFO?<br></span><strong>{% if source.write_nfo %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
|
<tr title="Should a JSON file be written with the media?">
|
||||||
|
<td class="hide-on-small-only">Write JSON?</td>
|
||||||
|
<td><span class="hide-on-med-and-up">Write JSON?<br></span><strong>{% if source.write_json %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
|
</tr>
|
||||||
|
<tr title="Delete media that is no longer on this playlist?">
|
||||||
|
<td class="hide-on-small-only">Delete removed media</td>
|
||||||
|
<td><span class="hide-on-med-and-up">Delete removed media<br></span><strong>{% if source.delete_removed_media %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
|
</tr>
|
||||||
|
<tr title="Delete files on disk when they are removed from TubeSync?">
|
||||||
|
<td class="hide-on-small-only">Delete files on disk</td>
|
||||||
|
<td><span class="hide-on-med-and-up">Delete files on disk<br></span><strong>{% if source.delete_files_on_disk %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
|
</tr>
|
||||||
{% if source.delete_old_media and source.days_to_keep > 0 %}
|
{% if source.delete_old_media and source.days_to_keep > 0 %}
|
||||||
<tr title="Days after which your media from this source will be locally deleted">
|
<tr title="Days after which your media from this source will be locally deleted">
|
||||||
<td class="hide-on-small-only">Delete old media</td>
|
<td class="hide-on-small-only">Delete old media</td>
|
||||||
|
@ -122,6 +142,55 @@
|
||||||
<td class="hide-on-small-only">UUID</td>
|
<td class="hide-on-small-only">UUID</td>
|
||||||
<td><span class="hide-on-med-and-up">UUID<br></span><strong>{{ source.uuid }}</strong></td>
|
<td><span class="hide-on-med-and-up">UUID<br></span><strong>{{ source.uuid }}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
|
<tr title="{{ _('Embedding thumbnail?') }}">
|
||||||
|
<td class="hide-on-small-only">{{ _("Embed thumbnail?") }}:</td>
|
||||||
|
<td><span class="hide-on-med-and-up">{{ _("Embed thumbnail?") }}<br></span><strong><i class="fas {% if source.embed_thumbnail %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
|
||||||
|
</tr>
|
||||||
|
<tr title="{{ _('Embedding metadata?') }}">
|
||||||
|
<td class="hide-on-small-only">{{ _("Embed metadata?") }}:</td>
|
||||||
|
<td><span class="hide-on-med-and-up">{{ _("Embed metadata?") }}<br></span><strong><i class="fas {% if source.embed_metadata %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
<tr title="{{ _('Is sponsorblock enabled?') }}">
|
||||||
|
<td class="hide-on-small-only">{{ _("SponsorBlock?") }}:</td>
|
||||||
|
<td><span class="hide-on-med-and-up">{{ _("Sponsorblock enabled?") }}<br></span><strong><i class="fas {% if source.enable_sponsorblock %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% if source.enable_sponsorblock %}
|
||||||
|
<tr title="{{ _('SponsorBlock: What to block?') }}">
|
||||||
|
<td class="hide-on-small-only">{{ _("What blocked?") }}:</td>
|
||||||
|
<td><span class="hide-on-med-and-up">{{ _("What blocked?") }}<br></span><strong>
|
||||||
|
{% if source.sponsorblock_categories.all_choice in source.sponsorblock_categories.selected_choices %}
|
||||||
|
{% for k,v in source.sponsorblock_categories.possible_choices %}
|
||||||
|
{{ v }}: <i class="fas fa-check"></i><BR>
|
||||||
|
{% endfor %}
|
||||||
|
{% else %}
|
||||||
|
{% for c in source.sponsorblock_categories.selected_choices %}
|
||||||
|
{% for k,v in source.sponsorblock_categories.possible_choices %}
|
||||||
|
{% if k == c %} {{ v }}: <i class="fas fa-check"></i><BR>{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
{% endfor %}
|
||||||
|
{% endif %}
|
||||||
|
</strong></td>
|
||||||
|
</tr>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<tr title="{{ _('Are Subtitles downloaded?') }}">
|
||||||
|
<td class="hide-on-small-only">{{ _("Download subtitles?") }}:</td>
|
||||||
|
<td><span class="hide-on-med-and-up">{{ _("Download subtitles?") }}:</span><strong><i class="fas {% if source.write_subtitles %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% if source.write_subtitles %}
|
||||||
|
<tr title="{{ _('Are auto subs accepted?') }}">
|
||||||
|
<td class="hide-on-small-only">{{ _("Auto-generated subtitles?") }}:</td>
|
||||||
|
<td><span class="hide-on-med-and-up">{{ _("Auto-generated subtitles?") }}:</span><strong><i class="fas {% if source.auto_subtitles %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
|
||||||
|
</tr>
|
||||||
|
<tr title="{{ _('Subs langs?') }}">
|
||||||
|
<td class="hide-on-small-only">{{ _("Subs langs?") }}:</td>
|
||||||
|
<td><span class="hide-on-med-and-up">{{ _("Subs langs?") }}:</span><strong>{{source.sub_langs}}</strong></td>
|
||||||
|
</tr>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
</table>
|
</table>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
|
@ -24,15 +24,18 @@
|
||||||
<div class="col s12">
|
<div class="col s12">
|
||||||
<div class="collection">
|
<div class="collection">
|
||||||
{% for source in sources %}
|
{% for source in sources %}
|
||||||
<a href="{% url 'sync:source' pk=source.pk %}" class="collection-item">
|
<span class="collection-item flex-collection-container">
|
||||||
|
<a href="{% url 'sync:source' pk=source.pk %}" class="flex-grow">
|
||||||
{{ source.icon|safe }} <strong>{{ source.name }}</strong> ({{ source.get_source_type_display }} "{{ source.key }}")<br>
|
{{ source.icon|safe }} <strong>{{ source.name }}</strong> ({{ source.get_source_type_display }} "{{ source.key }}")<br>
|
||||||
{{ source.format_summary }}<br>
|
{{ source.format_summary }}<br>
|
||||||
{% if source.has_failed %}
|
{% if source.has_failed %}
|
||||||
<span class="error-text"><i class="fas fa-exclamation-triangle"></i> <strong>Source has permanent failures</strong></span>
|
<span class="error-text"><i class="fas fa-exclamation-triangle"></i> <strong>Source has permanent failures</strong></span>
|
||||||
{% else %}
|
{% else %}
|
||||||
<strong>{{ source.media_count }}</strong> media items{% if source.delete_old_media and source.days_to_keep > 0 %}, keep {{ source.days_to_keep }} days of media{% endif %}
|
<strong>{{ source.media_count }}</strong> media items, <strong>{{ source.downloaded_count }}</strong> downloaded{% if source.delete_old_media and source.days_to_keep > 0 %}, keeping {{ source.days_to_keep }} days of media{% endif %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
</a>
|
</a>
|
||||||
|
<a href="{% url 'sync:source-sync-now' pk=source.pk %}" class="collection-item"><i class="fas fa-arrow-rotate-right"></i></a>
|
||||||
|
</span>
|
||||||
{% empty %}
|
{% empty %}
|
||||||
<span class="collection-item no-items"><i class="fas fa-info-circle"></i> You haven't added any sources.</span>
|
<span class="collection-item no-items"><i class="fas fa-info-circle"></i> You haven't added any sources.</span>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue