Compare commits
273 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2431f8775a | ||
|
|
438316953a | ||
|
|
85637fecba | ||
|
|
f9dfffe91a | ||
|
|
0845a6662d | ||
|
|
419c4c5a9f | ||
|
|
2f475bf2a8 | ||
|
|
7d16a1714c | ||
|
|
a7100a0f53 | ||
|
|
5a4e6cee58 | ||
|
|
e69adafcec | ||
|
|
f9908a4d3b | ||
|
|
bf99241ad2 | ||
|
|
0e278bc8c4 | ||
|
|
57921ca6b9 | ||
|
|
fb23fdeae1 | ||
|
|
433a7792d5 | ||
|
|
e198cc011b | ||
|
|
296a790af5 | ||
|
|
e190821b7b | ||
|
|
1ba865cf0d | ||
|
|
05d50c958e | ||
|
|
8426c7309a | ||
|
|
0450d47d81 | ||
|
|
e8d899d273 | ||
|
|
25d5768f6e | ||
|
|
e9a3f2dd59 | ||
|
|
7832282545 | ||
|
|
d161aef112 | ||
|
|
8901aea8d7 | ||
|
|
227cae4cdb | ||
|
|
5e57abe86a | ||
|
|
c04c1b3cfb | ||
|
|
a94541a354 | ||
|
|
84a368aa09 | ||
|
|
6d2fb86e7d | ||
|
|
67a3998aac | ||
|
|
e3ca39b5db | ||
|
|
872bfc5124 | ||
|
|
ae5550a28d | ||
|
|
153ca032b1 | ||
|
|
95e727b0a8 | ||
|
|
f1c6fc3086 | ||
|
|
a3559526cb | ||
|
|
a0ca2b3061 | ||
|
|
120a19d2ba | ||
|
|
4735e72f12 | ||
|
|
5954dba48d | ||
|
|
3f699c82ec | ||
|
|
cb39ece21b | ||
|
|
3943115b18 | ||
|
|
97183fff97 | ||
|
|
b4a247bf37 | ||
|
|
3bee755eb5 | ||
|
|
9957639be5 | ||
|
|
a5a8e37a20 | ||
|
|
7a1b2adc59 | ||
|
|
7668466bc3 | ||
|
|
ceb8cbc442 | ||
|
|
8b0d1b3397 | ||
|
|
77fb4963f9 | ||
|
|
538b3cb319 | ||
|
|
2335ceb2dc | ||
|
|
0c347d523d | ||
|
|
d0a214e21b | ||
|
|
2d8e6ed9b8 | ||
|
|
d0fcc07656 | ||
|
|
5bf53b3d3a | ||
|
|
280112beae | ||
|
|
367d41f2be | ||
|
|
61cd63bcc1 | ||
|
|
62e2e2f9e6 | ||
|
|
aa90a1afb0 | ||
|
|
238c0b5911 | ||
|
|
4d7e9133e0 | ||
|
|
709b7b44d5 | ||
|
|
425b011054 | ||
|
|
b1b3c99726 | ||
|
|
02212b8fad | ||
|
|
70e541dea0 | ||
|
|
cc7b7727c2 | ||
|
|
0757c99f01 | ||
|
|
61d97201a5 | ||
|
|
a58aef29fb | ||
|
|
56c882fa79 | ||
|
|
9a3030543f | ||
|
|
4eca23d88b | ||
|
|
aa6df98927 | ||
|
|
f3cac1908c | ||
|
|
d9a519ffde | ||
|
|
185823b040 | ||
|
|
4774a35d44 | ||
|
|
b4a89968d0 | ||
|
|
5056419aa4 | ||
|
|
a8488026d0 | ||
|
|
6459e273f1 | ||
|
|
42e4ee775f | ||
|
|
b3d9e74818 | ||
|
|
c396821cb1 | ||
|
|
f9858a4d1a | ||
|
|
3c1d64a089 | ||
|
|
00fbd53b11 | ||
|
|
99825c9a08 | ||
|
|
4f163f2f2c | ||
|
|
936800992c | ||
|
|
2e9ee04c97 | ||
|
|
8d60629034 | ||
|
|
f54adab213 | ||
|
|
6618409f9c | ||
|
|
8d08027024 | ||
|
|
9a543b1496 | ||
|
|
b70703b7a7 | ||
|
|
6ac0c6e9de | ||
|
|
ecb1aaf5b5 | ||
|
|
4c5027e0c4 | ||
|
|
e8d75a79c5 | ||
|
|
ff4be7cfa0 | ||
|
|
c1cb19259e | ||
|
|
837b6c3107 | ||
|
|
ced6314a62 | ||
|
|
bb6c195ae7 | ||
|
|
c280b76777 | ||
|
|
248da767b0 | ||
|
|
1069b87295 | ||
|
|
3525a65cd6 | ||
|
|
c51a5bb365 | ||
|
|
7f4b9aff14 | ||
|
|
a59e7fe65f | ||
|
|
3e0a71f2ef | ||
|
|
3dfbca2af4 | ||
|
|
0c256f59d8 | ||
|
|
dbbae72c25 | ||
|
|
b1b852d82c | ||
|
|
437bb17f75 | ||
|
|
fdfcb5fd33 | ||
|
|
ff35f791f6 | ||
|
|
b2ea37ffec | ||
|
|
d89530d5b8 | ||
|
|
f00050008b | ||
|
|
68604d19c7 | ||
|
|
55e5b5632f | ||
|
|
5e18cb92dd | ||
|
|
6178e0baa0 | ||
|
|
8050bac507 | ||
|
|
6dcdac1647 | ||
|
|
763f6b89ef | ||
|
|
6c28292918 | ||
|
|
574fc55a5e | ||
|
|
c8fd74b3a4 | ||
|
|
6622e17a5a | ||
|
|
ea05bd0b13 | ||
|
|
019c98dc76 | ||
|
|
72dfe51a46 | ||
|
|
22cebba8ac | ||
|
|
d51d198f94 | ||
|
|
ed0c2d7dd3 | ||
|
|
5ced901ae8 | ||
|
|
afda481046 | ||
|
|
a986864f77 | ||
|
|
ad1c4ecbc9 | ||
|
|
54b7de4442 | ||
|
|
d1996aee80 | ||
|
|
326cefbec1 | ||
|
|
d6e81c6af7 | ||
|
|
a000f8f2c0 | ||
|
|
cbab09e931 | ||
|
|
414fca08ca | ||
|
|
874c71b7aa | ||
|
|
5b101825f5 | ||
|
|
0db8db4351 | ||
|
|
d4fd148089 | ||
|
|
c739d594d8 | ||
|
|
05e8ad8e89 | ||
|
|
024ab72e5f | ||
|
|
66ec3a29ec | ||
|
|
28a565737f | ||
|
|
2c7116f6ba | ||
|
|
9ccb9db6de | ||
|
|
2d992cbb90 | ||
|
|
302a3614cf | ||
|
|
ea546013de | ||
|
|
fb18610893 | ||
|
|
2364432088 | ||
|
|
655bed14fd | ||
|
|
721399f665 | ||
|
|
694ed5c581 | ||
|
|
a98f2462ed | ||
|
|
5461a5357d | ||
|
|
20df9f4044 | ||
|
|
3ec4f7c525 | ||
|
|
443fb827d0 | ||
|
|
a810303f52 | ||
|
|
9370a481f9 | ||
|
|
1478c95d59 | ||
|
|
f69fa747af | ||
|
|
a29a92893f | ||
|
|
7d471056c1 | ||
|
|
119493c181 | ||
|
|
02a0f924b4 | ||
|
|
38665eb00d | ||
|
|
c32358bcef | ||
|
|
df9316bede | ||
|
|
8525d920a0 | ||
|
|
a6e08d9a10 | ||
|
|
2e0d0385b0 | ||
|
|
972c184c70 | ||
|
|
adeafbfcb4 | ||
|
|
2c1c45e829 | ||
|
|
c64f54bcb4 | ||
|
|
6ce55b0337 | ||
|
|
d06c4beae0 | ||
|
|
db651e16b9 | ||
|
|
86068790ed | ||
|
|
ea72671351 | ||
|
|
96b9eddf43 | ||
|
|
bceefc8b01 | ||
|
|
820cc69937 | ||
|
|
1e8711be51 | ||
|
|
e3423bc2d2 | ||
|
|
6fbf72d0e7 | ||
|
|
d6852bf828 | ||
|
|
f6f4f244d7 | ||
|
|
df35aa2a5f | ||
|
|
799c0fce39 | ||
|
|
2f324f28a9 | ||
|
|
895bfe6f87 | ||
|
|
e0669b107d | ||
|
|
0dc201b293 | ||
|
|
82fa0f6bce | ||
|
|
8b93cb4a59 | ||
|
|
647254d7f7 | ||
|
|
3567e20600 | ||
|
|
5348e25303 | ||
|
|
749df3f7bb | ||
|
|
2c2f53e5b2 | ||
|
|
06cfafb803 | ||
|
|
f5a37f2e86 | ||
|
|
36747a47e0 | ||
|
|
ffd69e8d40 | ||
|
|
eebef3371f | ||
|
|
4cd6701c8a | ||
|
|
4ebe6f2a37 | ||
|
|
d553d58fde | ||
|
|
df40a1367a | ||
|
|
607ee77e70 | ||
|
|
9af493aa8a | ||
|
|
f0c94ff789 | ||
|
|
39c7799831 | ||
|
|
da7371f830 | ||
|
|
387cfefc8f | ||
|
|
d92dbde781 | ||
|
|
e36658e1a1 | ||
|
|
51cd942717 | ||
|
|
001554db1a | ||
|
|
7cf86bb98d | ||
|
|
c28c095f48 | ||
|
|
12eac049e5 | ||
|
|
304cc153cf | ||
|
|
b45231f533 | ||
|
|
26eb9d30e8 | ||
|
|
97fa62d12b | ||
|
|
1b092fe955 | ||
|
|
18a59fe835 | ||
|
|
410906ad8e | ||
|
|
8f4b09f346 | ||
|
|
cda021cbbf | ||
|
|
ee4df99cd8 | ||
|
|
53f1873a9b | ||
|
|
9434293a84 | ||
|
|
ed69fe9dcc | ||
|
|
67af70569b | ||
|
|
68a62d8a7c | ||
|
|
55578f4de7 |
1
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
github: [meeb]
|
||||||
33
.github/workflows/ci.yaml
vendored
@@ -27,7 +27,7 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
python -m pip install --upgrade pip
|
python -m pip install --upgrade pip
|
||||||
pip install pipenv
|
pip install pipenv
|
||||||
pipenv install --system
|
pipenv install --system --skip-lock
|
||||||
- name: Set up Django environment
|
- name: Set up Django environment
|
||||||
run: cp tubesync/tubesync/local_settings.py.example tubesync/tubesync/local_settings.py
|
run: cp tubesync/tubesync/local_settings.py.example tubesync/tubesync/local_settings.py
|
||||||
- name: Run Django tests
|
- name: Run Django tests
|
||||||
@@ -35,13 +35,24 @@ jobs:
|
|||||||
containerise:
|
containerise:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- name: Set up QEMU
|
||||||
- name: Build the container image
|
uses: docker/setup-qemu-action@v1
|
||||||
run: docker build . --tag $IMAGE_NAME
|
- name: Set up Docker Buildx
|
||||||
- name: Log into GitHub Container Registry
|
uses: docker/setup-buildx-action@v1
|
||||||
run: echo "${{ secrets.REGISTRY_ACCESS_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin
|
- name: Log into GitHub Container Registry
|
||||||
- name: Push image to GitHub Container Registry
|
run: echo "${{ secrets.REGISTRY_ACCESS_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin
|
||||||
run: |
|
- name: Lowercase github username for ghcr
|
||||||
LATEST_TAG=ghcr.io/meeb/$IMAGE_NAME:latest
|
id: string
|
||||||
docker tag $IMAGE_NAME $LATEST_TAG
|
uses: ASzc/change-string-case-action@v1
|
||||||
docker push $LATEST_TAG
|
with:
|
||||||
|
string: ${{ github.actor }}
|
||||||
|
- name: Build and push
|
||||||
|
uses: docker/build-push-action@v2
|
||||||
|
with:
|
||||||
|
platforms: linux/amd64,linux/arm64
|
||||||
|
push: true
|
||||||
|
tags: ghcr.io/${{ steps.string.outputs.lowercase }}/${{ env.IMAGE_NAME }}:latest
|
||||||
|
cache-from: type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/${{ env.IMAGE_NAME }}:latest
|
||||||
|
cache-to: type=inline
|
||||||
|
build-args: |
|
||||||
|
IMAGE_NAME=${{ env.IMAGE_NAME }}
|
||||||
|
|||||||
40
.github/workflows/release.yaml
vendored
@@ -11,18 +11,28 @@ jobs:
|
|||||||
containerise:
|
containerise:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- name: Set up QEMU
|
||||||
- name: Get tag
|
uses: docker/setup-qemu-action@v1
|
||||||
id: vars
|
- name: Get tag
|
||||||
run: echo ::set-output name=tag::${GITHUB_REF#refs/*/}
|
id: tag
|
||||||
- name: Build the container image
|
uses: dawidd6/action-get-tag@v1
|
||||||
run: docker build . --tag $IMAGE_NAME
|
- uses: docker/build-push-action@v2
|
||||||
- name: Log into GitHub Container Registry
|
- name: Set up Docker Buildx
|
||||||
run: echo "${{ secrets.REGISTRY_ACCESS_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin
|
uses: docker/setup-buildx-action@v1
|
||||||
- name: Push image to GitHub Container Registry
|
- name: Log into GitHub Container Registry
|
||||||
env:
|
run: echo "${{ secrets.REGISTRY_ACCESS_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin
|
||||||
RELEASE_TAG: ${{ steps.vars.outputs.tag }}
|
- name: Lowercase github username for ghcr
|
||||||
run: |
|
id: string
|
||||||
REF_TAG=ghcr.io/meeb/$IMAGE_NAME:$RELEASE_TAG
|
uses: ASzc/change-string-case-action@v1
|
||||||
docker tag $IMAGE_NAME $REF_TAG
|
with:
|
||||||
docker push $REF_TAG
|
string: ${{ github.actor }}
|
||||||
|
- name: Build and push
|
||||||
|
uses: docker/build-push-action@v2
|
||||||
|
with:
|
||||||
|
platforms: linux/amd64,linux/arm64
|
||||||
|
push: true
|
||||||
|
tags: ghcr.io/${{ steps.string.outputs.lowercase }}/${{ env.IMAGE_NAME }}:${{ steps.tag.outputs.tag }}
|
||||||
|
cache-from: type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/${{ env.IMAGE_NAME }}:${{ steps.tag.outputs.tag }}
|
||||||
|
cache-to: type=inline
|
||||||
|
build-args: |
|
||||||
|
IMAGE_NAME=${{ env.IMAGE_NAME }}
|
||||||
|
|||||||
148
Dockerfile
@@ -1,72 +1,111 @@
|
|||||||
FROM debian:buster-slim
|
FROM debian:bullseye-slim
|
||||||
|
|
||||||
ARG ARCH="amd64"
|
ARG TARGETPLATFORM
|
||||||
ARG S6_VERSION="2.1.0.2"
|
ARG S6_VERSION="3.1.2.1"
|
||||||
ARG FFMPEG_VERSION="4.3.1"
|
ARG FFMPEG_DATE="autobuild-2023-01-03-12-55"
|
||||||
|
ARG FFMPEG_VERSION="109474-gc94988a781"
|
||||||
|
|
||||||
ENV DEBIAN_FRONTEND="noninteractive" \
|
ENV DEBIAN_FRONTEND="noninteractive" \
|
||||||
HOME="/root" \
|
HOME="/root" \
|
||||||
LANGUAGE="en_US.UTF-8" \
|
LANGUAGE="en_US.UTF-8" \
|
||||||
LANG="en_US.UTF-8" \
|
LANG="en_US.UTF-8" \
|
||||||
LC_ALL="en_US.UTF-8" \
|
LC_ALL="en_US.UTF-8" \
|
||||||
TERM="xterm" \
|
TERM="xterm"
|
||||||
S6_EXPECTED_SHA256="52460473413601ff7a84ae690b161a074217ddc734990c2cdee9847166cf669e" \
|
|
||||||
S6_DOWNLOAD="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-${ARCH}.tar.gz" \
|
|
||||||
FFMPEG_EXPECTED_SHA256="47d95c0129fba27d051748a442a44a73ce1bd38d1e3f9fe1e9dd7258c7581fa5" \
|
|
||||||
FFMPEG_DOWNLOAD="https://tubesync.sfo2.digitaloceanspaces.com/ffmpeg-${FFMPEG_VERSION}-${ARCH}-static.tar.xz"
|
|
||||||
|
|
||||||
|
|
||||||
# Install third party software
|
# Install third party software
|
||||||
RUN set -x && \
|
RUN export ARCH=$(case ${TARGETPLATFORM:-linux/amd64} in \
|
||||||
apt-get update && \
|
"linux/amd64") echo "amd64" ;; \
|
||||||
apt-get -y --no-install-recommends install locales && \
|
"linux/arm64") echo "aarch64" ;; \
|
||||||
echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && \
|
*) echo "" ;; esac) && \
|
||||||
locale-gen en_US.UTF-8 && \
|
export S6_ARCH_EXPECTED_SHA256=$(case ${TARGETPLATFORM:-linux/amd64} in \
|
||||||
# Install required distro packages
|
"linux/amd64") echo "6019b6b06cfdbb1d1cd572d46b9b158a4904fd19ca59d374de4ddaaa6a3727d5" ;; \
|
||||||
apt-get -y --no-install-recommends install curl xz-utils ca-certificates binutils && \
|
"linux/arm64") echo "e73f9a021b64f88278830742149c14ef8a52331102881ba025bf32a66a0e7c78" ;; \
|
||||||
# Install s6
|
*) echo "" ;; esac) && \
|
||||||
curl -L ${S6_DOWNLOAD} --output /tmp/s6-overlay-${ARCH}.tar.gz && \
|
export S6_DOWNLOAD_ARCH=$(case ${TARGETPLATFORM:-linux/amd64} in \
|
||||||
sha256sum /tmp/s6-overlay-${ARCH}.tar.gz && \
|
"linux/amd64") echo "https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-x86_64.tar.xz" ;; \
|
||||||
echo "${S6_EXPECTED_SHA256} /tmp/s6-overlay-${ARCH}.tar.gz" | sha256sum -c - && \
|
"linux/arm64") echo "https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-aarch64.tar.xz" ;; \
|
||||||
tar xzf /tmp/s6-overlay-${ARCH}.tar.gz -C / && \
|
*) echo "" ;; esac) && \
|
||||||
# Install ffmpeg
|
export FFMPEG_EXPECTED_SHA256=$(case ${TARGETPLATFORM:-linux/amd64} in \
|
||||||
curl -L ${FFMPEG_DOWNLOAD} --output /tmp/ffmpeg-${ARCH}-static.tar.xz && \
|
"linux/amd64") echo "ed9059668e4a6dac9bde122a775f52ad08cbb90df3658f8c1e328477c13c242e" ;; \
|
||||||
echo "${FFMPEG_EXPECTED_SHA256} /tmp/ffmpeg-${ARCH}-static.tar.xz" | sha256sum -c - && \
|
"linux/arm64") echo "dd1375bd351d38ea1cc3efd68a998699366e28bd9b90df65d11af2b9121746b7" ;; \
|
||||||
xz --decompress /tmp/ffmpeg-${ARCH}-static.tar.xz && \
|
*) echo "" ;; esac) && \
|
||||||
tar -xvf /tmp/ffmpeg-${ARCH}-static.tar -C /tmp && \
|
export FFMPEG_DOWNLOAD=$(case ${TARGETPLATFORM:-linux/amd64} in \
|
||||||
install -v -s -g root -o root -m 0755 -s /tmp/ffmpeg-${FFMPEG_VERSION}-${ARCH}-static/ffmpeg -t /usr/local/bin && \
|
"linux/amd64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/${FFMPEG_DATE}/ffmpeg-N-${FFMPEG_VERSION}-linux64-gpl.tar.xz" ;; \
|
||||||
# Clean up
|
"linux/arm64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/${FFMPEG_DATE}/ffmpeg-N-${FFMPEG_VERSION}-linuxarm64-gpl.tar.xz" ;; \
|
||||||
rm -rf /tmp/s6-overlay-${ARCH}.tar.gz && \
|
*) echo "" ;; esac) && \
|
||||||
rm -rf /tmp/ffmpeg-${ARCH}-static.tar && \
|
export S6_NOARCH_EXPECTED_SHA256="cee89d3eeabdfe15239b2c5c3581d9352d2197d4fd23bba3f1e64bf916ccf496" && \
|
||||||
rm -rf /tmp/ffmpeg-${FFMPEG_VERSION}-${ARCH}-static && \
|
export S6_DOWNLOAD_NOARCH="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-noarch.tar.xz" && \
|
||||||
apt-get -y autoremove --purge curl xz-utils binutils
|
echo "Building for arch: ${ARCH}|${ARCH44}, downloading S6 from: ${S6_DOWNLOAD}}, expecting S6 SHA256: ${S6_EXPECTED_SHA256}" && \
|
||||||
|
set -x && \
|
||||||
|
apt-get update && \
|
||||||
|
apt-get -y --no-install-recommends install locales && \
|
||||||
|
echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && \
|
||||||
|
locale-gen en_US.UTF-8 && \
|
||||||
|
# Install required distro packages
|
||||||
|
apt-get -y --no-install-recommends install curl ca-certificates binutils xz-utils && \
|
||||||
|
# Install s6
|
||||||
|
curl -L ${S6_DOWNLOAD_NOARCH} --output /tmp/s6-overlay-noarch.tar.xz && \
|
||||||
|
echo "${S6_NOARCH_EXPECTED_SHA256} /tmp/s6-overlay-noarch.tar.xz" | sha256sum -c - && \
|
||||||
|
tar -C / -Jxpf /tmp/s6-overlay-noarch.tar.xz && \
|
||||||
|
curl -L ${S6_DOWNLOAD_ARCH} --output /tmp/s6-overlay-${ARCH}.tar.xz && \
|
||||||
|
echo "${S6_ARCH_EXPECTED_SHA256} /tmp/s6-overlay-${ARCH}.tar.xz" | sha256sum -c - && \
|
||||||
|
tar -C / -Jxpf /tmp/s6-overlay-${ARCH}.tar.xz && \
|
||||||
|
# Install ffmpeg
|
||||||
|
echo "Building for arch: ${ARCH}|${ARCH44}, downloading FFMPEG from: ${FFMPEG_DOWNLOAD}, expecting FFMPEG SHA256: ${FFMPEG_EXPECTED_SHA256}" && \
|
||||||
|
curl -L ${FFMPEG_DOWNLOAD} --output /tmp/ffmpeg-${ARCH}.tar.xz && \
|
||||||
|
sha256sum /tmp/ffmpeg-${ARCH}.tar.xz && \
|
||||||
|
echo "${FFMPEG_EXPECTED_SHA256} /tmp/ffmpeg-${ARCH}.tar.xz" | sha256sum -c - && \
|
||||||
|
tar -xf /tmp/ffmpeg-${ARCH}.tar.xz --strip-components=2 --no-anchored -C /usr/local/bin/ "ffmpeg" && \
|
||||||
|
tar -xf /tmp/ffmpeg-${ARCH}.tar.xz --strip-components=2 --no-anchored -C /usr/local/bin/ "ffprobe" && \
|
||||||
|
# Clean up
|
||||||
|
rm -rf /tmp/s6-overlay-${ARCH}.tar.gz && \
|
||||||
|
rm -rf /tmp/ffmpeg-${ARCH}.tar.xz && \
|
||||||
|
apt-get -y autoremove --purge curl binutils xz-utils
|
||||||
|
|
||||||
# Copy app
|
# Copy app
|
||||||
COPY tubesync /app
|
COPY tubesync /app
|
||||||
COPY tubesync/tubesync/local_settings.py.container /app/tubesync/local_settings.py
|
COPY tubesync/tubesync/local_settings.py.container /app/tubesync/local_settings.py
|
||||||
|
|
||||||
# Append container bundled software versions
|
# Copy over pip.conf to use piwheels
|
||||||
RUN echo "ffmpeg_version = '${FFMPEG_VERSION}-static'" >> /app/common/third_party_versions.py
|
COPY pip.conf /etc/pip.conf
|
||||||
|
|
||||||
# Add Pipfile
|
# Add Pipfile
|
||||||
COPY Pipfile /app/Pipfile
|
COPY Pipfile /app/Pipfile
|
||||||
COPY Pipfile.lock /app/Pipfile.lock
|
|
||||||
|
|
||||||
# Switch workdir to the the app
|
# Switch workdir to the the app
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
# Set up the app
|
# Set up the app
|
||||||
RUN set -x && \
|
RUN set -x && \
|
||||||
|
apt-get update && \
|
||||||
# Install required distro packages
|
# Install required distro packages
|
||||||
apt-get -y install nginx-light && \
|
apt-get -y install nginx-light && \
|
||||||
apt-get -y --no-install-recommends install python3 python3-setuptools python3-pip python3-dev gcc make && \
|
apt-get -y --no-install-recommends install \
|
||||||
|
python3 \
|
||||||
|
python3-setuptools \
|
||||||
|
python3-pip \
|
||||||
|
python3-dev \
|
||||||
|
gcc \
|
||||||
|
g++ \
|
||||||
|
make \
|
||||||
|
default-libmysqlclient-dev \
|
||||||
|
libmariadb3 \
|
||||||
|
postgresql-common \
|
||||||
|
libpq-dev \
|
||||||
|
libpq5 \
|
||||||
|
libjpeg62-turbo \
|
||||||
|
libwebp6 \
|
||||||
|
libjpeg-dev \
|
||||||
|
zlib1g-dev \
|
||||||
|
libwebp-dev \
|
||||||
|
redis-server && \
|
||||||
# Install pipenv
|
# Install pipenv
|
||||||
pip3 --disable-pip-version-check install pipenv && \
|
pip3 --disable-pip-version-check install wheel pipenv && \
|
||||||
# Create a 'app' user which the application will run as
|
# Create a 'app' user which the application will run as
|
||||||
groupadd app && \
|
groupadd app && \
|
||||||
useradd -M -d /app -s /bin/false -g app app && \
|
useradd -M -d /app -s /bin/false -g app app && \
|
||||||
# Install non-distro packages
|
# Install non-distro packages
|
||||||
pipenv install --system && \
|
pipenv install --system --skip-lock && \
|
||||||
# Make absolutely sure we didn't accidentally bundle a SQLite dev database
|
# Make absolutely sure we didn't accidentally bundle a SQLite dev database
|
||||||
rm -rf /app/db.sqlite3 && \
|
rm -rf /app/db.sqlite3 && \
|
||||||
# Run any required app commands
|
# Run any required app commands
|
||||||
@@ -79,10 +118,20 @@ RUN set -x && \
|
|||||||
mkdir -p /downloads/video && \
|
mkdir -p /downloads/video && \
|
||||||
# Clean up
|
# Clean up
|
||||||
rm /app/Pipfile && \
|
rm /app/Pipfile && \
|
||||||
rm /app/Pipfile.lock && \
|
|
||||||
pipenv --clear && \
|
pipenv --clear && \
|
||||||
pip3 --disable-pip-version-check uninstall -y pipenv wheel virtualenv && \
|
pip3 --disable-pip-version-check uninstall -y pipenv wheel virtualenv && \
|
||||||
apt-get -y autoremove --purge python3-pip python3-dev gcc make && \
|
apt-get -y autoremove --purge \
|
||||||
|
python3-pip \
|
||||||
|
python3-dev \
|
||||||
|
gcc \
|
||||||
|
g++ \
|
||||||
|
make \
|
||||||
|
default-libmysqlclient-dev \
|
||||||
|
postgresql-common \
|
||||||
|
libpq-dev \
|
||||||
|
libjpeg-dev \
|
||||||
|
zlib1g-dev \
|
||||||
|
libwebp-dev && \
|
||||||
apt-get -y autoremove && \
|
apt-get -y autoremove && \
|
||||||
apt-get -y autoclean && \
|
apt-get -y autoclean && \
|
||||||
rm -rf /var/lib/apt/lists/* && \
|
rm -rf /var/lib/apt/lists/* && \
|
||||||
@@ -92,7 +141,12 @@ RUN set -x && \
|
|||||||
rm -rf /root && \
|
rm -rf /root && \
|
||||||
mkdir -p /root && \
|
mkdir -p /root && \
|
||||||
chown root:root /root && \
|
chown root:root /root && \
|
||||||
chmod 0700 /root
|
chmod 0755 /root
|
||||||
|
|
||||||
|
# Append software versions
|
||||||
|
RUN set -x && \
|
||||||
|
FFMPEG_VERSION=$(/usr/local/bin/ffmpeg -version | head -n 1 | awk '{ print $3 }') && \
|
||||||
|
echo "ffmpeg_version = '${FFMPEG_VERSION}'" >> /app/common/third_party_versions.py
|
||||||
|
|
||||||
# Copy root
|
# Copy root
|
||||||
COPY config/root /
|
COPY config/root /
|
||||||
@@ -102,7 +156,7 @@ HEALTHCHECK --interval=1m --timeout=10s CMD /app/healthcheck.py http://127.0.0.1
|
|||||||
|
|
||||||
# ENVS and ports
|
# ENVS and ports
|
||||||
ENV PYTHONPATH "/app:${PYTHONPATH}"
|
ENV PYTHONPATH "/app:${PYTHONPATH}"
|
||||||
EXPOSE 8080
|
EXPOSE 4848
|
||||||
|
|
||||||
# Volumes
|
# Volumes
|
||||||
VOLUME ["/config", "/downloads"]
|
VOLUME ["/config", "/downloads"]
|
||||||
|
|||||||
14
Makefile
@@ -8,17 +8,17 @@ all: clean build
|
|||||||
|
|
||||||
|
|
||||||
dev:
|
dev:
|
||||||
$(python) app/manage.py runserver
|
$(python) tubesync/manage.py runserver
|
||||||
|
|
||||||
|
|
||||||
build:
|
build:
|
||||||
mkdir -p app/media
|
mkdir -p tubesync/media
|
||||||
mkdir -p app/static
|
mkdir -p tubesync/static
|
||||||
$(python) app/manage.py collectstatic --noinput
|
$(python) tubesync/manage.py collectstatic --noinput
|
||||||
|
|
||||||
|
|
||||||
clean:
|
clean:
|
||||||
rm -rf app/static
|
rm -rf tubesync/static
|
||||||
|
|
||||||
|
|
||||||
container: clean
|
container: clean
|
||||||
@@ -29,5 +29,5 @@ runcontainer:
|
|||||||
$(docker) run --rm --name $(name) --env-file dev.env --log-opt max-size=50m -ti -p 4848:4848 $(image)
|
$(docker) run --rm --name $(name) --env-file dev.env --log-opt max-size=50m -ti -p 4848:4848 $(image)
|
||||||
|
|
||||||
|
|
||||||
test:
|
test: build
|
||||||
$(python) app/manage.py test --verbosity=2
|
cd tubesync && $(python) manage.py test --verbosity=2 && cd ..
|
||||||
|
|||||||
12
Pipfile
@@ -6,7 +6,7 @@ verify_ssl = true
|
|||||||
[dev-packages]
|
[dev-packages]
|
||||||
|
|
||||||
[packages]
|
[packages]
|
||||||
django = "*"
|
django = "~=3.2"
|
||||||
django-sass-processor = "*"
|
django-sass-processor = "*"
|
||||||
libsass = "*"
|
libsass = "*"
|
||||||
pillow = "*"
|
pillow = "*"
|
||||||
@@ -14,9 +14,11 @@ whitenoise = "*"
|
|||||||
gunicorn = "*"
|
gunicorn = "*"
|
||||||
django-compressor = "*"
|
django-compressor = "*"
|
||||||
httptools = "*"
|
httptools = "*"
|
||||||
youtube-dl = "*"
|
|
||||||
django-background-tasks = "*"
|
django-background-tasks = "*"
|
||||||
requests = "*"
|
requests = "*"
|
||||||
|
django-basicauth = "*"
|
||||||
[requires]
|
psycopg2-binary = "*"
|
||||||
python_version = "3"
|
mysqlclient = "*"
|
||||||
|
yt-dlp = "*"
|
||||||
|
redis = "*"
|
||||||
|
hiredis = "*"
|
||||||
|
|||||||
247
Pipfile.lock
generated
@@ -1,247 +0,0 @@
|
|||||||
{
|
|
||||||
"_meta": {
|
|
||||||
"hash": {
|
|
||||||
"sha256": "a4bb556fc61ee4583f9588980450b071814298ee4d1a1023fad149c14d14aaba"
|
|
||||||
},
|
|
||||||
"pipfile-spec": 6,
|
|
||||||
"requires": {
|
|
||||||
"python_version": "3"
|
|
||||||
},
|
|
||||||
"sources": [
|
|
||||||
{
|
|
||||||
"name": "pypi",
|
|
||||||
"url": "https://pypi.org/simple",
|
|
||||||
"verify_ssl": true
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"default": {
|
|
||||||
"asgiref": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:5ee950735509d04eb673bd7f7120f8fa1c9e2df495394992c73234d526907e17",
|
|
||||||
"sha256:7162a3cb30ab0609f1a4c95938fd73e8604f63bdba516a7f7d64b83ff09478f0"
|
|
||||||
],
|
|
||||||
"version": "==3.3.1"
|
|
||||||
},
|
|
||||||
"certifi": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c",
|
|
||||||
"sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"
|
|
||||||
],
|
|
||||||
"version": "==2020.12.5"
|
|
||||||
},
|
|
||||||
"chardet": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa",
|
|
||||||
"sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"
|
|
||||||
],
|
|
||||||
"version": "==4.0.0"
|
|
||||||
},
|
|
||||||
"django": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:5c866205f15e7a7123f1eec6ab939d22d5bde1416635cab259684af66d8e48a2",
|
|
||||||
"sha256:edb10b5c45e7e9c0fb1dc00b76ec7449aca258a39ffd613dbd078c51d19c9f03"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==3.1.4"
|
|
||||||
},
|
|
||||||
"django-appconf": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:1b1d0e1069c843ebe8ae5aa48ec52403b1440402b320c3e3a206a0907e97bb06",
|
|
||||||
"sha256:be58deb54a43d77d2e1621fe59f787681376d3cd0b8bd8e4758ef6c3a6453380"
|
|
||||||
],
|
|
||||||
"version": "==1.0.4"
|
|
||||||
},
|
|
||||||
"django-background-tasks": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:e1b19e8d495a276c9d64c5a1ff8b41132f75d2f58e45be71b78650dad59af9de"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==1.2.5"
|
|
||||||
},
|
|
||||||
"django-compat": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:3ac9a3bedc56b9365d9eb241bc5157d0c193769bf995f9a78dc1bc24e7c2331b"
|
|
||||||
],
|
|
||||||
"version": "==1.0.15"
|
|
||||||
},
|
|
||||||
"django-compressor": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:57ac0a696d061e5fc6fbc55381d2050f353b973fb97eee5593f39247bc0f30af",
|
|
||||||
"sha256:d2ed1c6137ddaac5536233ec0a819e14009553fee0a869bea65d03e5285ba74f"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==2.4"
|
|
||||||
},
|
|
||||||
"django-sass-processor": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:9b46a12ca8bdcb397d46fbcc49e6a926ff9f76a93c5efeb23b495419fd01fc7a"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==0.8.2"
|
|
||||||
},
|
|
||||||
"gunicorn": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626",
|
|
||||||
"sha256:cd4a810dd51bf497552cf3f863b575dabd73d6ad6a91075b65936b151cbf4f9c"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==20.0.4"
|
|
||||||
},
|
|
||||||
"httptools": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:0a4b1b2012b28e68306575ad14ad5e9120b34fccd02a81eb08838d7e3bbb48be",
|
|
||||||
"sha256:3592e854424ec94bd17dc3e0c96a64e459ec4147e6d53c0a42d0ebcef9cb9c5d",
|
|
||||||
"sha256:41b573cf33f64a8f8f3400d0a7faf48e1888582b6f6e02b82b9bd4f0bf7497ce",
|
|
||||||
"sha256:56b6393c6ac7abe632f2294da53f30d279130a92e8ae39d8d14ee2e1b05ad1f2",
|
|
||||||
"sha256:86c6acd66765a934e8730bf0e9dfaac6fdcf2a4334212bd4a0a1c78f16475ca6",
|
|
||||||
"sha256:96da81e1992be8ac2fd5597bf0283d832287e20cb3cfde8996d2b00356d4e17f",
|
|
||||||
"sha256:96eb359252aeed57ea5c7b3d79839aaa0382c9d3149f7d24dd7172b1bcecb009",
|
|
||||||
"sha256:a2719e1d7a84bb131c4f1e0cb79705034b48de6ae486eb5297a139d6a3296dce",
|
|
||||||
"sha256:ac0aa11e99454b6a66989aa2d44bca41d4e0f968e395a0a8f164b401fefe359a",
|
|
||||||
"sha256:bc3114b9edbca5a1eb7ae7db698c669eb53eb8afbbebdde116c174925260849c",
|
|
||||||
"sha256:fa3cd71e31436911a44620473e873a256851e1f53dee56669dae403ba41756a4",
|
|
||||||
"sha256:fea04e126014169384dee76a153d4573d90d0cbd1d12185da089f73c78390437"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==0.1.1"
|
|
||||||
},
|
|
||||||
"idna": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
|
|
||||||
"sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
|
|
||||||
],
|
|
||||||
"version": "==2.10"
|
|
||||||
},
|
|
||||||
"libsass": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:1521d2a8d4b397c6ec90640a1f6b5529077035efc48ef1c2e53095544e713d1b",
|
|
||||||
"sha256:1b2d415bbf6fa7da33ef46e549db1418498267b459978eff8357e5e823962d35",
|
|
||||||
"sha256:25ebc2085f5eee574761ccc8d9cd29a9b436fc970546d5ef08c6fa41eb57dff1",
|
|
||||||
"sha256:2ae806427b28bc1bb7cb0258666d854fcf92ba52a04656b0b17ba5e190fb48a9",
|
|
||||||
"sha256:4a246e4b88fd279abef8b669206228c92534d96ddcd0770d7012088c408dff23",
|
|
||||||
"sha256:553e5096414a8d4fb48d0a48f5a038d3411abe254d79deac5e008516c019e63a",
|
|
||||||
"sha256:697f0f9fa8a1367ca9ec6869437cb235b1c537fc8519983d1d890178614a8903",
|
|
||||||
"sha256:a8fd4af9f853e8bf42b1425c5e48dd90b504fa2e70d7dac5ac80b8c0a5a5fe85",
|
|
||||||
"sha256:c9411fec76f480ffbacc97d8188322e02a5abca6fc78e70b86a2a2b421eae8a2",
|
|
||||||
"sha256:daa98a51086d92aa7e9c8871cf1a8258124b90e2abf4697852a3dca619838618",
|
|
||||||
"sha256:e0e60836eccbf2d9e24ec978a805cd6642fa92515fbd95e3493fee276af76f8a",
|
|
||||||
"sha256:e64ae2587f1a683e831409aad03ba547c245ef997e1329fffadf7a866d2510b8",
|
|
||||||
"sha256:f6852828e9e104d2ce0358b73c550d26dd86cc3a69439438c3b618811b9584f5"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==0.20.1"
|
|
||||||
},
|
|
||||||
"pillow": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:006de60d7580d81f4a1a7e9f0173dc90a932e3905cc4d47ea909bc946302311a",
|
|
||||||
"sha256:0a2e8d03787ec7ad71dc18aec9367c946ef8ef50e1e78c71f743bc3a770f9fae",
|
|
||||||
"sha256:0eeeae397e5a79dc088d8297a4c2c6f901f8fb30db47795113a4a605d0f1e5ce",
|
|
||||||
"sha256:11c5c6e9b02c9dac08af04f093eb5a2f84857df70a7d4a6a6ad461aca803fb9e",
|
|
||||||
"sha256:2fb113757a369a6cdb189f8df3226e995acfed0a8919a72416626af1a0a71140",
|
|
||||||
"sha256:4b0ef2470c4979e345e4e0cc1bbac65fda11d0d7b789dbac035e4c6ce3f98adb",
|
|
||||||
"sha256:59e903ca800c8cfd1ebe482349ec7c35687b95e98cefae213e271c8c7fffa021",
|
|
||||||
"sha256:5abd653a23c35d980b332bc0431d39663b1709d64142e3652890df4c9b6970f6",
|
|
||||||
"sha256:5f9403af9c790cc18411ea398a6950ee2def2a830ad0cfe6dc9122e6d528b302",
|
|
||||||
"sha256:6b4a8fd632b4ebee28282a9fef4c341835a1aa8671e2770b6f89adc8e8c2703c",
|
|
||||||
"sha256:6c1aca8231625115104a06e4389fcd9ec88f0c9befbabd80dc206c35561be271",
|
|
||||||
"sha256:795e91a60f291e75de2e20e6bdd67770f793c8605b553cb6e4387ce0cb302e09",
|
|
||||||
"sha256:7ba0ba61252ab23052e642abdb17fd08fdcfdbbf3b74c969a30c58ac1ade7cd3",
|
|
||||||
"sha256:7c9401e68730d6c4245b8e361d3d13e1035cbc94db86b49dc7da8bec235d0015",
|
|
||||||
"sha256:81f812d8f5e8a09b246515fac141e9d10113229bc33ea073fec11403b016bcf3",
|
|
||||||
"sha256:895d54c0ddc78a478c80f9c438579ac15f3e27bf442c2a9aa74d41d0e4d12544",
|
|
||||||
"sha256:8de332053707c80963b589b22f8e0229f1be1f3ca862a932c1bcd48dafb18dd8",
|
|
||||||
"sha256:92c882b70a40c79de9f5294dc99390671e07fc0b0113d472cbea3fde15db1792",
|
|
||||||
"sha256:95edb1ed513e68bddc2aee3de66ceaf743590bf16c023fb9977adc4be15bd3f0",
|
|
||||||
"sha256:b63d4ff734263ae4ce6593798bcfee6dbfb00523c82753a3a03cbc05555a9cc3",
|
|
||||||
"sha256:bd7bf289e05470b1bc74889d1466d9ad4a56d201f24397557b6f65c24a6844b8",
|
|
||||||
"sha256:cc3ea6b23954da84dbee8025c616040d9aa5eaf34ea6895a0a762ee9d3e12e11",
|
|
||||||
"sha256:cc9ec588c6ef3a1325fa032ec14d97b7309db493782ea8c304666fb10c3bd9a7",
|
|
||||||
"sha256:d3d07c86d4efa1facdf32aa878bd508c0dc4f87c48125cc16b937baa4e5b5e11",
|
|
||||||
"sha256:d8a96747df78cda35980905bf26e72960cba6d355ace4780d4bdde3b217cdf1e",
|
|
||||||
"sha256:e38d58d9138ef972fceb7aeec4be02e3f01d383723965bfcef14d174c8ccd039",
|
|
||||||
"sha256:eb472586374dc66b31e36e14720747595c2b265ae962987261f044e5cce644b5",
|
|
||||||
"sha256:fbd922f702582cb0d71ef94442bfca57624352622d75e3be7a1e7e9360b07e72"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==8.0.1"
|
|
||||||
},
|
|
||||||
"pytz": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268",
|
|
||||||
"sha256:5c55e189b682d420be27c6995ba6edce0c0a77dd67bfbe2ae6607134d5851ffd"
|
|
||||||
],
|
|
||||||
"version": "==2020.4"
|
|
||||||
},
|
|
||||||
"rcssmin": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:ca87b695d3d7864157773a61263e5abb96006e9ff0e021eff90cbe0e1ba18270"
|
|
||||||
],
|
|
||||||
"version": "==1.0.6"
|
|
||||||
},
|
|
||||||
"requests": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804",
|
|
||||||
"sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==2.25.1"
|
|
||||||
},
|
|
||||||
"rjsmin": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:0ab825839125eaca57cc59581d72e596e58a7a56fbc0839996b7528f0343a0a8",
|
|
||||||
"sha256:211c2fe8298951663bbc02acdffbf714f6793df54bfc50e1c6c9e71b3f2559a3",
|
|
||||||
"sha256:466fe70cc5647c7c51b3260c7e2e323a98b2b173564247f9c89e977720a0645f",
|
|
||||||
"sha256:585e75a84d9199b68056fd4a083d9a61e2a92dfd10ff6d4ce5bdb04bc3bdbfaf",
|
|
||||||
"sha256:6044ca86e917cd5bb2f95e6679a4192cef812122f28ee08c677513de019629b3",
|
|
||||||
"sha256:714329db774a90947e0e2086cdddb80d5e8c4ac1c70c9f92436378dedb8ae345",
|
|
||||||
"sha256:799890bd07a048892d8d3deb9042dbc20b7f5d0eb7da91e9483c561033b23ce2",
|
|
||||||
"sha256:975b69754d6a76be47c0bead12367a1ca9220d08e5393f80bab0230d4625d1f4",
|
|
||||||
"sha256:b15dc75c71f65d9493a8c7fa233fdcec823e3f1b88ad84a843ffef49b338ac32",
|
|
||||||
"sha256:dd0f4819df4243ffe4c964995794c79ca43943b5b756de84be92b445a652fb86",
|
|
||||||
"sha256:e3908b21ebb584ce74a6ac233bdb5f29485752c9d3be5e50c5484ed74169232c",
|
|
||||||
"sha256:e487a7783ac4339e79ec610b98228eb9ac72178973e3dee16eba0e3feef25924",
|
|
||||||
"sha256:ecd29f1b3e66a4c0753105baec262b331bcbceefc22fbe6f7e8bcd2067bcb4d7"
|
|
||||||
],
|
|
||||||
"version": "==1.1.0"
|
|
||||||
},
|
|
||||||
"six": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
|
|
||||||
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
|
|
||||||
],
|
|
||||||
"version": "==1.15.0"
|
|
||||||
},
|
|
||||||
"sqlparse": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:017cde379adbd6a1f15a61873f43e8274179378e95ef3fede90b5aa64d304ed0",
|
|
||||||
"sha256:0f91fd2e829c44362cbcfab3e9ae12e22badaa8a29ad5ff599f9ec109f0454e8"
|
|
||||||
],
|
|
||||||
"version": "==0.4.1"
|
|
||||||
},
|
|
||||||
"urllib3": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08",
|
|
||||||
"sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"
|
|
||||||
],
|
|
||||||
"version": "==1.26.2"
|
|
||||||
},
|
|
||||||
"whitenoise": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:05ce0be39ad85740a78750c86a93485c40f08ad8c62a6006de0233765996e5c7",
|
|
||||||
"sha256:05d00198c777028d72d8b0bbd234db605ef6d60e9410125124002518a48e515d"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==5.2.0"
|
|
||||||
},
|
|
||||||
"youtube-dl": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:65968065e66966955dc79fad9251565fcc982566118756da624bd21467f3a04c",
|
|
||||||
"sha256:eaa859f15b6897bec21474b7787dc958118c8088e1f24d4ef1d58eab13188958"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==2020.12.14"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"develop": {}
|
|
||||||
}
|
|
||||||
152
README.md
@@ -9,42 +9,43 @@ downloaded.
|
|||||||
|
|
||||||
If you want to watch YouTube videos in particular quality or settings from your local
|
If you want to watch YouTube videos in particular quality or settings from your local
|
||||||
media server, then TubeSync is for you. Internally, TubeSync is a web interface wrapper
|
media server, then TubeSync is for you. Internally, TubeSync is a web interface wrapper
|
||||||
on `youtube-dl` and `ffmpeg` with a task scheduler.
|
on `yt-dlp` and `ffmpeg` with a task scheduler.
|
||||||
|
|
||||||
There are several other web interfaces to YouTube and `youtube-dl` all with varying
|
There are several other web interfaces to YouTube and `yt-dlp` all with varying
|
||||||
features and implemenations. TubeSync's largest difference is full PVR experience of
|
features and implementations. TubeSync's largest difference is full PVR experience of
|
||||||
updating media servers and better selection of media formats. Additionally, to be as
|
updating media servers and better selection of media formats. Additionally, to be as
|
||||||
hands-free as possible, TubeSync has gradual retrying of failures with back-off timers
|
hands-free as possible, TubeSync has gradual retrying of failures with back-off timers
|
||||||
so media which fails to download will be retried for an extended period making it,
|
so media which fails to download will be retried for an extended period making it,
|
||||||
hopefully, quite reliable.
|
hopefully, quite reliable.
|
||||||
|
|
||||||
|
|
||||||
# Latest container image
|
# Latest container image
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
ghcr.io/meeb/tubesync:v0.4
|
ghcr.io/meeb/tubesync:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
# Screenshots
|
# Screenshots
|
||||||
|
|
||||||
### Dashboard
|
### Dashboard
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
### Sources overview
|
### Sources overview
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
### Source details
|
### Source details
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
### Media overview
|
### Media overview
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
### Media details
|
### Media details
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
|
|
||||||
# Requirements
|
# Requirements
|
||||||
@@ -68,11 +69,12 @@ currently just Plex, to complete the PVR experience.
|
|||||||
# Installation
|
# Installation
|
||||||
|
|
||||||
TubeSync is designed to be run in a container, such as via Docker or Podman. It also
|
TubeSync is designed to be run in a container, such as via Docker or Podman. It also
|
||||||
works in a Docker Compose stack. Only `amd64` is initially supported.
|
works in a Docker Compose stack. `amd64` (most desktop PCs and servers) and `arm64`
|
||||||
|
(modern ARM computers, such as the Rasperry Pi 3 or later) are supported.
|
||||||
|
|
||||||
Example (with Docker on *nix):
|
Example (with Docker on *nix):
|
||||||
|
|
||||||
First find your the user ID and group ID you want to run TubeSync as, if you're not
|
First find the user ID and group ID you want to run TubeSync as, if you're not
|
||||||
sure what this is it's probably your current user ID and group ID:
|
sure what this is it's probably your current user ID and group ID:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -97,8 +99,8 @@ $ mkdir /some/directory/tubesync-downloads
|
|||||||
Finally, download and run the container:
|
Finally, download and run the container:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Pull a versioned image
|
# Pull image
|
||||||
$ docker pull ghcr.io/meeb/tubesync:v0.4
|
$ docker pull ghcr.io/meeb/tubesync:latest
|
||||||
# Start the container using your user ID and group ID
|
# Start the container using your user ID and group ID
|
||||||
$ docker run \
|
$ docker run \
|
||||||
-d \
|
-d \
|
||||||
@@ -109,19 +111,21 @@ $ docker run \
|
|||||||
-v /some/directory/tubesync-config:/config \
|
-v /some/directory/tubesync-config:/config \
|
||||||
-v /some/directory/tubesync-downloads:/downloads \
|
-v /some/directory/tubesync-downloads:/downloads \
|
||||||
-p 4848:4848 \
|
-p 4848:4848 \
|
||||||
ghcr.io/meeb/tubesync:v0.4
|
ghcr.io/meeb/tubesync:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
Once running, open `http://localhost:4848` in your browser and you should see the
|
Once running, open `http://localhost:4848` in your browser and you should see the
|
||||||
TubeSync dashboard. If you do, you can proceed to adding some sources (YouTube channels
|
TubeSync dashboard. If you do, you can proceed to adding some sources (YouTube channels
|
||||||
and playlists). If not, check `docker logs tubesync` to see what errors might be
|
and playlists). If not, check `docker logs tubesync` to see what errors might be
|
||||||
occuring, typical ones are file permission issues.
|
occurring, typical ones are file permission issues.
|
||||||
|
|
||||||
Alternatively, for Docker Compose, you can use something like:
|
Alternatively, for Docker Compose, you can use something like:
|
||||||
|
|
||||||
```yaml
|
```yml
|
||||||
|
version: '3.7'
|
||||||
|
services:
|
||||||
tubesync:
|
tubesync:
|
||||||
image: ghcr.io/meeb/tubesync:v0.4
|
image: ghcr.io/meeb/tubesync:latest
|
||||||
container_name: tubesync
|
container_name: tubesync
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
ports:
|
ports:
|
||||||
@@ -135,6 +139,41 @@ Alternatively, for Docker Compose, you can use something like:
|
|||||||
- PGID=1000
|
- PGID=1000
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Optional authentication
|
||||||
|
|
||||||
|
Available in `v1.0` (or `:latest`)and later. If you want to enable a basic username and
|
||||||
|
password to be required to access the TubeSync dashboard you can set them with the
|
||||||
|
following environment variables:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
HTTP_USER
|
||||||
|
HTTP_PASS
|
||||||
|
```
|
||||||
|
|
||||||
|
For example, in the `docker run ...` line add in:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
...
|
||||||
|
-e HTTP_USER=some-username \
|
||||||
|
-e HTTP_PASS=some-secure-password \
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
Or in your Docker Compose file you would add in:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
...
|
||||||
|
environment:
|
||||||
|
- HTTP_USER=some-username
|
||||||
|
- HTTP_PASS=some-secure-password
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
When BOTH `HTTP_USER` and `HTTP_PASS` are set then basic HTTP authentication will be
|
||||||
|
enabled.
|
||||||
|
|
||||||
|
|
||||||
# Updating
|
# Updating
|
||||||
|
|
||||||
To update, you can just pull a new version of the container image as they are released.
|
To update, you can just pull a new version of the container image as they are released.
|
||||||
@@ -192,14 +231,26 @@ $ docker logs --follow tubesync
|
|||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
# Advanced usage guides
|
||||||
|
|
||||||
|
Once you're happy using TubeSync there are some advanced usage guides for more complex
|
||||||
|
and less common features:
|
||||||
|
|
||||||
|
* [Import existing media into TubeSync](https://github.com/meeb/tubesync/blob/main/docs/import-existing-media.md)
|
||||||
|
* [Sync or create missing metadata files](https://github.com/meeb/tubesync/blob/main/docs/create-missing-metadata.md)
|
||||||
|
* [Reset tasks from the command line](https://github.com/meeb/tubesync/blob/main/docs/reset-tasks.md)
|
||||||
|
* [Using PostgreSQL, MySQL or MariaDB as database backends](https://github.com/meeb/tubesync/blob/main/docs/other-database-backends.md)
|
||||||
|
* [Using cookies](https://github.com/meeb/tubesync/blob/main/docs/using-cookies.md)
|
||||||
|
|
||||||
|
|
||||||
# Warnings
|
# Warnings
|
||||||
|
|
||||||
### 1. Index frequency
|
### 1. Index frequency
|
||||||
|
|
||||||
It's a good idea to add sources with as low an index frequency as possible. This is the
|
It's a good idea to add sources with as long of an index frequency as possible. This is
|
||||||
duration between indexes of the source. An index is when TubeSync checks to see
|
the duration between indexes of the source. An index is when TubeSync checks to see
|
||||||
what videos available on a channel or playlist to find new media. Try and keep this as
|
what videos available on a channel or playlist to find new media. Try and keep this as
|
||||||
long as possible, 24 hours if possible.
|
long as possible, up to 24 hours.
|
||||||
|
|
||||||
|
|
||||||
### 2. Indexing massive channels
|
### 2. Indexing massive channels
|
||||||
@@ -209,6 +260,14 @@ every hour" or similar short interval it's entirely possible your TubeSync insta
|
|||||||
spend its entire time just indexing the massive channel over and over again without
|
spend its entire time just indexing the massive channel over and over again without
|
||||||
downloading any media. Check your tasks for the status of your TubeSync install.
|
downloading any media. Check your tasks for the status of your TubeSync install.
|
||||||
|
|
||||||
|
If you add a significant amount of "work" due to adding many large channels you may
|
||||||
|
need to increase the number of background workers by setting the `TUBESYNC_WORKERS`
|
||||||
|
environment variable. Try around ~4 at most, although the absolute maximum allowed is 8.
|
||||||
|
|
||||||
|
**Be nice.** it's likely entirely possible your IP address could get throttled by the
|
||||||
|
source if you try and crawl extremely large amounts very quickly. **Try and be polite
|
||||||
|
with the smallest amount of indexing and concurrent downloads possible for your needs.**
|
||||||
|
|
||||||
|
|
||||||
# FAQ
|
# FAQ
|
||||||
|
|
||||||
@@ -222,8 +281,8 @@ automatically.
|
|||||||
|
|
||||||
### Does TubeSync support any other video platforms?
|
### Does TubeSync support any other video platforms?
|
||||||
|
|
||||||
At the moment, no. This is a first release. The library TubeSync uses that does most
|
At the moment, no. This is a pre-release. The library TubeSync uses that does most
|
||||||
of the downloading work, `youtube-dl`, supports many hundreds of video sources so it's
|
of the downloading work, `yt-dlp`, supports many hundreds of video sources so it's
|
||||||
likely more will be added to TubeSync if there is demand for it.
|
likely more will be added to TubeSync if there is demand for it.
|
||||||
|
|
||||||
### Is there a progress bar?
|
### Is there a progress bar?
|
||||||
@@ -235,27 +294,27 @@ your install is doing check the container logs.
|
|||||||
|
|
||||||
### Are there alerts when a download is complete?
|
### Are there alerts when a download is complete?
|
||||||
|
|
||||||
No, this feature is best served by existing services such as the execelent
|
No, this feature is best served by existing services such as the excellent
|
||||||
[tautulli](https://tautulli.com/) which can monitor your Plex server and send alerts
|
[Tautulli](https://tautulli.com/) which can monitor your Plex server and send alerts
|
||||||
that way.
|
that way.
|
||||||
|
|
||||||
### There's errors in my "tasks" tab!
|
### There are errors in my "tasks" tab!
|
||||||
|
|
||||||
You only really need to worry about these if there is a permanent failure. Some errors
|
You only really need to worry about these if there is a permanent failure. Some errors
|
||||||
are temproary and will be retried for you automatically, such as a download got
|
are temporary and will be retried for you automatically, such as a download got
|
||||||
interrupted and will be tried again later. Sources with permanet errors (such as no
|
interrupted and will be tried again later. Sources with permanent errors (such as no
|
||||||
media available because you got a channel name wrong) will be shown as errors on the
|
media available because you got a channel name wrong) will be shown as errors on the
|
||||||
"sources" tab.
|
"sources" tab.
|
||||||
|
|
||||||
### What is TubeSync written in?
|
### What is TubeSync written in?
|
||||||
|
|
||||||
Python3 using Django, embedding youtube-dl. It's pretty much glue between other much
|
Python3 using Django, embedding yt-dlp. It's pretty much glue between other much
|
||||||
larger libraries.
|
larger libraries.
|
||||||
|
|
||||||
Notable libraries and software used:
|
Notable libraries and software used:
|
||||||
|
|
||||||
* [Django](https://www.djangoproject.com/)
|
* [Django](https://www.djangoproject.com/)
|
||||||
* [youtube-dl](https://yt-dl.org/)
|
* [yt-dlp](https://github.com/yt-dlp/yt-dlp)
|
||||||
* [ffmpeg](https://ffmpeg.org/)
|
* [ffmpeg](https://ffmpeg.org/)
|
||||||
* [Django Background Tasks](https://github.com/arteria/django-background-tasks/)
|
* [Django Background Tasks](https://github.com/arteria/django-background-tasks/)
|
||||||
* [django-sass](https://github.com/coderedcorp/django-sass/)
|
* [django-sass](https://github.com/coderedcorp/django-sass/)
|
||||||
@@ -265,7 +324,7 @@ See the [Pipefile](https://github.com/meeb/tubesync/blob/main/Pipfile) for a ful
|
|||||||
|
|
||||||
### Can I get access to the full Django admin?
|
### Can I get access to the full Django admin?
|
||||||
|
|
||||||
Yes, although pretty much all operations are available through the front end interface
|
Yes, although pretty much all operations are available through the front-end interface
|
||||||
and you can probably break things by playing in the admin. If you still want to access
|
and you can probably break things by playing in the admin. If you still want to access
|
||||||
it you can run:
|
it you can run:
|
||||||
|
|
||||||
@@ -278,7 +337,9 @@ can log in at http://localhost:4848/admin
|
|||||||
|
|
||||||
### Are there user accounts or multi-user support?
|
### Are there user accounts or multi-user support?
|
||||||
|
|
||||||
No not at the moment. This could be added later if there is demand for it.
|
There is support for basic HTTP authentication by setting the `HTTP_USER` and
|
||||||
|
`HTTP_PASS` environment variables. There is not support for multi-user or user
|
||||||
|
management.
|
||||||
|
|
||||||
### Does TubeSync support HTTPS?
|
### Does TubeSync support HTTPS?
|
||||||
|
|
||||||
@@ -293,23 +354,28 @@ Just `amd64` for the moment. Others may be made available if there is demand.
|
|||||||
# Advanced configuration
|
# Advanced configuration
|
||||||
|
|
||||||
There are a number of other environment variables you can set. These are, mostly,
|
There are a number of other environment variables you can set. These are, mostly,
|
||||||
**NOT** required to be set in the default container installation, they are mostly
|
**NOT** required to be set in the default container installation, they are really only
|
||||||
useful if you are manually installing TubeSync in some other environment. These are:
|
useful if you are manually installing TubeSync in some other environment. These are:
|
||||||
|
|
||||||
| Name | What | Example |
|
| Name | What | Example |
|
||||||
| ----------------- | ------------------------------------- | ---------------------------------- |
|
| ------------------------ | ------------------------------------------------------------ | ------------------------------------ |
|
||||||
| DJANGO_SECRET_KEY | Django secret key | YJySXnQLB7UVZw2dXKDWxI5lEZaImK6l |
|
| DJANGO_SECRET_KEY | Django's SECRET_KEY | YJySXnQLB7UVZw2dXKDWxI5lEZaImK6l |
|
||||||
| TUBESYNC_DEBUG | Enable debugging | True |
|
| DJANGO_URL_PREFIX | Run TubeSync in a sub-URL on the web server | /somepath/ |
|
||||||
| TUBESYNC_HOSTS | Django's ALLOWED_HOSTS | tubesync.example.com,otherhost.com |
|
| TUBESYNC_DEBUG | Enable debugging | True |
|
||||||
| GUNICORN_WORKERS | Number of gunicorn workers to spawn | 3 |
|
| TUBESYNC_WORKERS | Number of background workers, default is 2, max allowed is 8 | 2 |
|
||||||
| LISTEN_HOST | IP address for gunicorn to listen on | 127.0.0.1 |
|
| TUBESYNC_HOSTS | Django's ALLOWED_HOSTS, defaults to `*` | tubesync.example.com,otherhost.com |
|
||||||
| LISTEN_PORT | Port number for gunicorn to listen on | 8080 |
|
| GUNICORN_WORKERS | Number of gunicorn workers to spawn | 3 |
|
||||||
|
| LISTEN_HOST | IP address for gunicorn to listen on | 127.0.0.1 |
|
||||||
|
| LISTEN_PORT | Port number for gunicorn to listen on | 8080 |
|
||||||
|
| HTTP_USER | Sets the username for HTTP basic authentication | some-username |
|
||||||
|
| HTTP_PASS | Sets the password for HTTP basic authentication | some-secure-password |
|
||||||
|
| DATABASE_CONNECTION | Optional external database connection details | mysql://user:pass@host:port/database |
|
||||||
|
|
||||||
|
|
||||||
# Manual, non-containerised, installation
|
# Manual, non-containerised, installation
|
||||||
|
|
||||||
As a relatively normal Django app you can run TubeSync without the container. Beyond
|
As a relatively normal Django app you can run TubeSync without the container. Beyond
|
||||||
the following rough guide you are on your own and should be knowledgeable about
|
following this rough guide, you are on your own and should be knowledgeable about
|
||||||
installing and running WSGI-based Python web applications before attempting this.
|
installing and running WSGI-based Python web applications before attempting this.
|
||||||
|
|
||||||
1. Clone or download this repo
|
1. Clone or download this repo
|
||||||
@@ -320,7 +386,7 @@ installing and running WSGI-based Python web applications before attempting this
|
|||||||
`tubesync/tubesync/local_settings.py` and edit it as appropriate
|
`tubesync/tubesync/local_settings.py` and edit it as appropriate
|
||||||
5. Run migrations with `./manage.py migrate`
|
5. Run migrations with `./manage.py migrate`
|
||||||
6. Collect static files with `./manage.py collectstatic`
|
6. Collect static files with `./manage.py collectstatic`
|
||||||
6. Set up your prefered WSGI server, such as `gunicorn` poiting it to the application
|
6. Set up your prefered WSGI server, such as `gunicorn` pointing it to the application
|
||||||
in `tubesync/tubesync/wsgi.py`
|
in `tubesync/tubesync/wsgi.py`
|
||||||
7. Set up your proxy server such as `nginx` and forward it to the WSGI server
|
7. Set up your proxy server such as `nginx` and forward it to the WSGI server
|
||||||
8. Check the web interface is working
|
8. Check the web interface is working
|
||||||
@@ -332,7 +398,7 @@ installing and running WSGI-based Python web applications before attempting this
|
|||||||
|
|
||||||
# Tests
|
# Tests
|
||||||
|
|
||||||
There is a moderately comprehensive test suite focussing on the custom media format
|
There is a moderately comprehensive test suite focusing on the custom media format
|
||||||
matching logic and that the front-end interface works. You can run it via Django:
|
matching logic and that the front-end interface works. You can run it via Django:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
|||||||
@@ -1,27 +0,0 @@
|
|||||||
#!/usr/bin/with-contenv bash
|
|
||||||
|
|
||||||
# Change runtime user UID and GID
|
|
||||||
PUID=${PUID:-911}
|
|
||||||
PGID=${PGID:-911}
|
|
||||||
groupmod -o -g "$PGID" app
|
|
||||||
usermod -o -u "$PUID" app
|
|
||||||
|
|
||||||
# Reset permissions
|
|
||||||
chown -R app:app /run/app && \
|
|
||||||
chmod -R 0700 /run/app && \
|
|
||||||
chown -R app:app /config && \
|
|
||||||
chmod -R 0755 /config && \
|
|
||||||
chown -R app:app /downloads && \
|
|
||||||
chmod -R 0755 /downloads && \
|
|
||||||
chown -R root:app /app && \
|
|
||||||
chmod -R 0750 /app && \
|
|
||||||
chown -R app:app /app/common/static && \
|
|
||||||
chmod -R 0750 /app/common/static && \
|
|
||||||
chown -R app:app /app/static && \
|
|
||||||
chmod -R 0750 /app/static && \
|
|
||||||
find /app -type f -exec chmod 640 {} \; && \
|
|
||||||
chmod +x /app/healthcheck.py
|
|
||||||
|
|
||||||
# Run migrations
|
|
||||||
exec s6-setuidgid app \
|
|
||||||
/usr/bin/python3 /app/manage.py migrate
|
|
||||||
@@ -79,6 +79,11 @@ http {
|
|||||||
proxy_connect_timeout 10;
|
proxy_connect_timeout 10;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# File dwnload and streaming
|
||||||
|
location /media-data/ {
|
||||||
|
internal;
|
||||||
|
alias /downloads/;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
46
config/root/etc/redis/redis.conf
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
bind 127.0.0.1
|
||||||
|
protected-mode yes
|
||||||
|
port 6379
|
||||||
|
tcp-backlog 511
|
||||||
|
timeout 0
|
||||||
|
tcp-keepalive 300
|
||||||
|
daemonize no
|
||||||
|
supervised no
|
||||||
|
loglevel notice
|
||||||
|
logfile ""
|
||||||
|
databases 1
|
||||||
|
always-show-logo no
|
||||||
|
save ""
|
||||||
|
dir /var/lib/redis
|
||||||
|
maxmemory 64mb
|
||||||
|
maxmemory-policy noeviction
|
||||||
|
lazyfree-lazy-eviction no
|
||||||
|
lazyfree-lazy-expire no
|
||||||
|
lazyfree-lazy-server-del no
|
||||||
|
replica-lazy-flush no
|
||||||
|
lazyfree-lazy-user-del no
|
||||||
|
oom-score-adj no
|
||||||
|
oom-score-adj-values 0 200 800
|
||||||
|
appendonly no
|
||||||
|
appendfsync no
|
||||||
|
lua-time-limit 5000
|
||||||
|
slowlog-log-slower-than 10000
|
||||||
|
slowlog-max-len 128
|
||||||
|
latency-monitor-threshold 0
|
||||||
|
notify-keyspace-events ""
|
||||||
|
hash-max-ziplist-entries 512
|
||||||
|
hash-max-ziplist-value 64
|
||||||
|
list-max-ziplist-size -2
|
||||||
|
list-compress-depth 0
|
||||||
|
set-max-intset-entries 512
|
||||||
|
zset-max-ziplist-entries 128
|
||||||
|
zset-max-ziplist-value 64
|
||||||
|
hll-sparse-max-bytes 3000
|
||||||
|
stream-node-max-bytes 4096
|
||||||
|
stream-node-max-entries 100
|
||||||
|
activerehashing yes
|
||||||
|
client-output-buffer-limit normal 0 0 0
|
||||||
|
client-output-buffer-limit replica 256mb 64mb 60
|
||||||
|
client-output-buffer-limit pubsub 32mb 8mb 60
|
||||||
|
hz 10
|
||||||
|
dynamic-hz yes
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
gunicorn
|
||||||
25
config/root/etc/s6-overlay/s6-rc.d/celery-beat/run
Executable file
@@ -0,0 +1,25 @@
|
|||||||
|
#!/usr/bin/with-contenv bash
|
||||||
|
|
||||||
|
UMASK_SET=${UMASK_SET:-022}
|
||||||
|
umask "$UMASK_SET"
|
||||||
|
|
||||||
|
cd /app || exit
|
||||||
|
|
||||||
|
PIDFILE=/run/app/celery-beat.pid
|
||||||
|
SCHEDULE=/tmp/tubesync-celerybeat-schedule
|
||||||
|
|
||||||
|
if [ -f "${PIDFILE}" ]
|
||||||
|
then
|
||||||
|
PID=$(cat $PIDFILE)
|
||||||
|
echo "Unexpected PID file exists at ${PIDFILE} with PID: ${PID}"
|
||||||
|
if kill -0 $PID
|
||||||
|
then
|
||||||
|
echo "Killing old gunicorn process with PID: ${PID}"
|
||||||
|
kill -9 $PID
|
||||||
|
fi
|
||||||
|
echo "Removing stale PID file: ${PIDFILE}"
|
||||||
|
rm ${PIDFILE}
|
||||||
|
fi
|
||||||
|
|
||||||
|
#exec s6-setuidgid app \
|
||||||
|
# /usr/local/bin/celery --workdir /app -A tubesync beat --pidfile ${PIDFILE} -s ${SCHEDULE}
|
||||||
1
config/root/etc/s6-overlay/s6-rc.d/celery-beat/type
Normal file
@@ -0,0 +1 @@
|
|||||||
|
longrun
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
gunicorn
|
||||||
24
config/root/etc/s6-overlay/s6-rc.d/celery-worker/run
Executable file
@@ -0,0 +1,24 @@
|
|||||||
|
#!/usr/bin/with-contenv bash
|
||||||
|
|
||||||
|
UMASK_SET=${UMASK_SET:-022}
|
||||||
|
umask "$UMASK_SET"
|
||||||
|
|
||||||
|
cd /app || exit
|
||||||
|
|
||||||
|
PIDFILE=/run/app/celery-worker.pid
|
||||||
|
|
||||||
|
if [ -f "${PIDFILE}" ]
|
||||||
|
then
|
||||||
|
PID=$(cat $PIDFILE)
|
||||||
|
echo "Unexpected PID file exists at ${PIDFILE} with PID: ${PID}"
|
||||||
|
if kill -0 $PID
|
||||||
|
then
|
||||||
|
echo "Killing old gunicorn process with PID: ${PID}"
|
||||||
|
kill -9 $PID
|
||||||
|
fi
|
||||||
|
echo "Removing stale PID file: ${PIDFILE}"
|
||||||
|
rm ${PIDFILE}
|
||||||
|
fi
|
||||||
|
|
||||||
|
#exec s6-setuidgid app \
|
||||||
|
# /usr/local/bin/celery --workdir /app -A tubesync worker --pidfile ${PIDFILE} -l INFO
|
||||||
1
config/root/etc/s6-overlay/s6-rc.d/celery-worker/type
Normal file
@@ -0,0 +1 @@
|
|||||||
|
longrun
|
||||||
1
config/root/etc/s6-overlay/s6-rc.d/gunicorn/dependencies
Normal file
@@ -0,0 +1 @@
|
|||||||
|
tubesync-init
|
||||||
24
config/root/etc/s6-overlay/s6-rc.d/gunicorn/run
Executable file
@@ -0,0 +1,24 @@
|
|||||||
|
#!/command/with-contenv bash
|
||||||
|
|
||||||
|
UMASK_SET=${UMASK_SET:-022}
|
||||||
|
umask "$UMASK_SET"
|
||||||
|
|
||||||
|
cd /app || exit
|
||||||
|
|
||||||
|
PIDFILE=/run/app/gunicorn.pid
|
||||||
|
|
||||||
|
if [ -f "${PIDFILE}" ]
|
||||||
|
then
|
||||||
|
PID=$(cat $PIDFILE)
|
||||||
|
echo "Unexpected PID file exists at ${PIDFILE} with PID: ${PID}"
|
||||||
|
if kill -0 $PID
|
||||||
|
then
|
||||||
|
echo "Killing old gunicorn process with PID: ${PID}"
|
||||||
|
kill -9 $PID
|
||||||
|
fi
|
||||||
|
echo "Removing stale PID file: ${PIDFILE}"
|
||||||
|
rm ${PIDFILE}
|
||||||
|
fi
|
||||||
|
|
||||||
|
exec s6-setuidgid app \
|
||||||
|
/usr/local/bin/gunicorn -c /app/tubesync/gunicorn.py --capture-output tubesync.wsgi:application
|
||||||
1
config/root/etc/s6-overlay/s6-rc.d/gunicorn/type
Normal file
@@ -0,0 +1 @@
|
|||||||
|
longrun
|
||||||
1
config/root/etc/s6-overlay/s6-rc.d/nginx/dependencies
Normal file
@@ -0,0 +1 @@
|
|||||||
|
gunicorn
|
||||||
5
config/root/etc/s6-overlay/s6-rc.d/nginx/run
Executable file
@@ -0,0 +1,5 @@
|
|||||||
|
#!/command/with-contenv bash
|
||||||
|
|
||||||
|
cd /
|
||||||
|
|
||||||
|
/usr/sbin/nginx
|
||||||
1
config/root/etc/s6-overlay/s6-rc.d/nginx/type
Normal file
@@ -0,0 +1 @@
|
|||||||
|
longrun
|
||||||
4
config/root/etc/s6-overlay/s6-rc.d/redis/run
Executable file
@@ -0,0 +1,4 @@
|
|||||||
|
#!/command/with-contenv bash
|
||||||
|
|
||||||
|
exec s6-setuidgid redis \
|
||||||
|
/usr/bin/redis-server /etc/redis/redis.conf
|
||||||
1
config/root/etc/s6-overlay/s6-rc.d/redis/type
Normal file
@@ -0,0 +1 @@
|
|||||||
|
longrun
|
||||||
27
config/root/etc/s6-overlay/s6-rc.d/tubesync-init/run
Executable file
@@ -0,0 +1,27 @@
|
|||||||
|
#!/command/with-contenv bash
|
||||||
|
|
||||||
|
# Change runtime user UID and GID
|
||||||
|
PUID="${PUID:-911}"
|
||||||
|
PUID="${PUID:-911}"
|
||||||
|
groupmod -o -g "$PGID" app
|
||||||
|
usermod -o -u "$PUID" app
|
||||||
|
|
||||||
|
# Reset permissions
|
||||||
|
chown -R app:app /run/app
|
||||||
|
chmod -R 0700 /run/app
|
||||||
|
chown -R app:app /config
|
||||||
|
chmod -R 0755 /config
|
||||||
|
chown -R app:app /downloads
|
||||||
|
chmod -R 0755 /downloads
|
||||||
|
chown -R root:app /app
|
||||||
|
chmod -R 0750 /app
|
||||||
|
chown -R app:app /app/common/static
|
||||||
|
chmod -R 0750 /app/common/static
|
||||||
|
chown -R app:app /app/static
|
||||||
|
chmod -R 0750 /app/static
|
||||||
|
find /app -type f ! -iname healthcheck.py -exec chmod 640 {} \;
|
||||||
|
chmod 0755 /app/healthcheck.py
|
||||||
|
|
||||||
|
# Run migrations
|
||||||
|
exec s6-setuidgid app \
|
||||||
|
/usr/bin/python3 /app/manage.py migrate
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
60000
|
||||||
1
config/root/etc/s6-overlay/s6-rc.d/tubesync-init/type
Normal file
@@ -0,0 +1 @@
|
|||||||
|
oneshot
|
||||||
3
config/root/etc/s6-overlay/s6-rc.d/tubesync-init/up
Executable file
@@ -0,0 +1,3 @@
|
|||||||
|
#!/command/execlineb -P
|
||||||
|
|
||||||
|
/etc/s6-overlay/s6-rc.d/tubesync-init/run
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
gunicorn
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
#!/usr/bin/with-contenv bash
|
#!/command/with-contenv bash
|
||||||
|
|
||||||
exec s6-setuidgid app \
|
exec s6-setuidgid app \
|
||||||
/usr/bin/python3 /app/manage.py process_tasks
|
/usr/bin/python3 /app/manage.py process_tasks
|
||||||
1
config/root/etc/s6-overlay/s6-rc.d/tubesync-worker/type
Normal file
@@ -0,0 +1 @@
|
|||||||
|
longrun
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
#!/usr/bin/with-contenv bash
|
|
||||||
|
|
||||||
UMASK_SET=${UMASK_SET:-022}
|
|
||||||
umask "$UMASK_SET"
|
|
||||||
|
|
||||||
cd /app || exit
|
|
||||||
|
|
||||||
exec s6-setuidgid app \
|
|
||||||
/usr/local/bin/gunicorn -c /app/tubesync/gunicorn.py --capture-output tubesync.wsgi:application
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
#!/usr/bin/with-contenv bash
|
|
||||||
|
|
||||||
cd /
|
|
||||||
|
|
||||||
/usr/sbin/nginx
|
|
||||||
37
docs/create-missing-metadata.md
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
# TubeSync
|
||||||
|
|
||||||
|
## Advanced usage guide - creating missing metadata
|
||||||
|
|
||||||
|
This is a new feature in v0.9 of TubeSync and later. It allows you to create or
|
||||||
|
re-create missing metadata in your TubeSync download directories for missing `nfo`
|
||||||
|
files and thumbnails.
|
||||||
|
|
||||||
|
If you add a source with "write NFO files" or "copy thumbnails" disabled, download
|
||||||
|
some media and then update the source to write NFO files or copy thumbnails then
|
||||||
|
TubeSync will not automatically retroactively attempt to copy or create your missing
|
||||||
|
metadata files. You can use a special one-off command to manually write missing
|
||||||
|
metadata files to the correct locations.
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
You have added a source without metadata writing enabled, downloaded some media, then
|
||||||
|
updated the source to enable metadata writing.
|
||||||
|
|
||||||
|
## Steps
|
||||||
|
|
||||||
|
### 1. Run the batch metadata sync command
|
||||||
|
|
||||||
|
Execute the following Django command:
|
||||||
|
|
||||||
|
`./manage.py sync-missing-metadata`
|
||||||
|
|
||||||
|
When deploying TubeSync inside a container, you can execute this with:
|
||||||
|
|
||||||
|
`docker exec -ti tubesync python3 /app/manage.py sync-missing-metadata`
|
||||||
|
|
||||||
|
This command will log what its doing to the terminal when you run it.
|
||||||
|
|
||||||
|
Internally, this command loops over all your sources which have been saved with
|
||||||
|
"write NFO files" or "copy thumbnails" enabled. Then, loops over all media saved to
|
||||||
|
that source and confirms that the appropriate thumbnail files have been copied over and
|
||||||
|
the NFO file has been written if enabled.
|
||||||
BIN
docs/dashboard-v0.5.png
Normal file
|
After Width: | Height: | Size: 188 KiB |
|
Before Width: | Height: | Size: 170 KiB |
81
docs/import-existing-media.md
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
# TubeSync
|
||||||
|
|
||||||
|
## Advanced usage guide - importing existing media
|
||||||
|
|
||||||
|
This is a new feature in v0.9 of TubeSync and later. It allows you to mark existing
|
||||||
|
downloaded media as "downloaded" in TubeSync. You can use this feature if, for example,
|
||||||
|
you already have an extensive catalogue of downloaded media which you want to mark
|
||||||
|
as downloaded into TubeSync so TubeSync doesn't re-download media you already have.
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
Your existing downloaded media MUST contain the unique ID. For YouTube videos, this is
|
||||||
|
means the YouTube video ID MUST be in the filename.
|
||||||
|
|
||||||
|
Supported extensions to be imported are .m4a, .ogg, .mkv, .mp3, .mp4 and .avi. Your
|
||||||
|
media you want to import must end in one of these file extensions.
|
||||||
|
|
||||||
|
## Caveats
|
||||||
|
|
||||||
|
As TubeSync does not probe media and your existing media may be re-encoded or in
|
||||||
|
different formats to what is available in the current media metadata there is no way
|
||||||
|
for TubeSync to know what codecs, resolution, bitrate etc. your imported media is in.
|
||||||
|
Any manually imported existing local media will display blank boxes for this
|
||||||
|
information on the TubeSync interface as it's unavailable.
|
||||||
|
|
||||||
|
## Steps
|
||||||
|
|
||||||
|
### 1. Add your source to TubeSync
|
||||||
|
|
||||||
|
Add your source to TubeSync, such as a YouTube channel. **Make sure you untick the
|
||||||
|
"download media" checkbox.**
|
||||||
|
|
||||||
|
This will allow TubeSync to index all the available media on your source, but won't
|
||||||
|
start downloading any media.
|
||||||
|
|
||||||
|
### 2. Wait
|
||||||
|
|
||||||
|
Wait for all the media on your source to be indexed. This may take some time.
|
||||||
|
|
||||||
|
### 3. Move your existing media into TubeSync
|
||||||
|
|
||||||
|
You now need to move your existing media into TubeSync. You need to move the media
|
||||||
|
files into the correct download directories created by TubeSync. For example, if you
|
||||||
|
have downloaded videos for a YouTube channel "TestChannel", you would have added this
|
||||||
|
as a source called TestChannel and in a directory called test-channel in Tubesync. It
|
||||||
|
would have a download directory created on disk at:
|
||||||
|
|
||||||
|
`/path/to/downloads/test-channel`
|
||||||
|
|
||||||
|
You would move all of your pre-existing videos you downloaded outside of TubeSync for
|
||||||
|
this channel into this directory.
|
||||||
|
|
||||||
|
In short, your existing media needs to be moved into the correct TubeSync source
|
||||||
|
directory to be detected.
|
||||||
|
|
||||||
|
This is required so TubeSync can known which Source to link the media to.
|
||||||
|
|
||||||
|
### 4. Run the batch import command
|
||||||
|
|
||||||
|
Execute the following Django command:
|
||||||
|
|
||||||
|
`./manage.py import-existing-media`
|
||||||
|
|
||||||
|
When deploying TubeSync inside a container, you can execute this with:
|
||||||
|
|
||||||
|
`docker exec -ti tubesync python3 /app/manage.py import-existing-media`
|
||||||
|
|
||||||
|
This command will log what its doing to the terminal when you run it.
|
||||||
|
|
||||||
|
Internally, `import-existing-media` looks for the unique media key (for YouTube, this
|
||||||
|
is the YouTube video ID) in the filename and detects the source to link it to based
|
||||||
|
on the directory the media file is inside.
|
||||||
|
|
||||||
|
|
||||||
|
### 5. Re-enable downloading at the source
|
||||||
|
|
||||||
|
Edit your source and re-enable / tick the "download media" option. This will allow
|
||||||
|
TubeSync to download any missing media you did not manually import.
|
||||||
|
|
||||||
|
Note that TubeSync will still get screenshots write `nfo` files etc. for files you
|
||||||
|
manually import if enabled at the source level.
|
||||||
BIN
docs/media-item-v0.5.png
Normal file
|
After Width: | Height: | Size: 361 KiB |
|
Before Width: | Height: | Size: 420 KiB |
BIN
docs/media-v0.5.png
Normal file
|
After Width: | Height: | Size: 666 KiB |
BIN
docs/media.png
|
Before Width: | Height: | Size: 530 KiB |
80
docs/other-database-backends.md
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
# TubeSync
|
||||||
|
|
||||||
|
## Advanced usage guide - using other database backends
|
||||||
|
|
||||||
|
This is a new feature in v1.0 of TubeSync and later. It allows you to use a custom
|
||||||
|
existing external database server instead of the default SQLite database. You may want
|
||||||
|
to use this if you encounter performance issues with adding very large or a large
|
||||||
|
number of channels and database write contention (as shown by errors in the log)
|
||||||
|
become an issue.
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
TubeSync supports SQLite (the automatic default) as well as PostgreSQL, MySQL and
|
||||||
|
MariaDB. For MariaDB just follow the MySQL instructions as the driver is the same.
|
||||||
|
|
||||||
|
You should start with a blank install of TubeSync. Migrating to a new database will
|
||||||
|
reset your database. If you are comfortable with Django you can export and re-import
|
||||||
|
existing database data with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ docker exec -i tubesync python3 /app/manage.py dumpdata > some-file.json
|
||||||
|
```
|
||||||
|
|
||||||
|
Then change you database backend over, then use
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ cat some-file.json | docker exec -i tubesync python3 /app/manage.py loaddata --format=json -
|
||||||
|
```
|
||||||
|
|
||||||
|
As detailed in the Django documentation:
|
||||||
|
|
||||||
|
https://docs.djangoproject.com/en/3.1/ref/django-admin/#dumpdata
|
||||||
|
|
||||||
|
and:
|
||||||
|
|
||||||
|
https://docs.djangoproject.com/en/3.1/ref/django-admin/#loaddata
|
||||||
|
|
||||||
|
Further instructions are beyond the scope of TubeSync documenation and you should refer
|
||||||
|
to Django documentation for more details.
|
||||||
|
|
||||||
|
If you are not comfortable with the above, then skip the `dumpdata` steps, however
|
||||||
|
remember you will start again with a completely new database.
|
||||||
|
|
||||||
|
## Steps
|
||||||
|
|
||||||
|
### 1. Create a database in your external database server
|
||||||
|
|
||||||
|
You need to create a database and a user with permissions to access the database in
|
||||||
|
your chosen external database server. Steps vary between PostgreSQL, MySQL and MariaDB
|
||||||
|
so this is up to you to work out.
|
||||||
|
|
||||||
|
### 2. Set the database connection string environment variable
|
||||||
|
|
||||||
|
You need to provide the database connection details to TubeSync via an environment
|
||||||
|
variable. The environment variable name is `DATABASE_CONNECTION` and the format is the
|
||||||
|
standard URL-style string. Examples are:
|
||||||
|
|
||||||
|
`postgresql://tubesync:password@localhost:5432/tubesync`
|
||||||
|
|
||||||
|
and
|
||||||
|
|
||||||
|
`mysql://tubesync:password@localhost:3306/tubesync`
|
||||||
|
|
||||||
|
*Important note:* For MySQL databases make SURE you create the tubesync database with
|
||||||
|
`utf8mb4` encoding, like:
|
||||||
|
|
||||||
|
`CREATE DATABASE tubesync CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci;`
|
||||||
|
|
||||||
|
Without `utf8mb4` encoding things like emojis in video titles (or any extended UTF8
|
||||||
|
characters) can cause issues.
|
||||||
|
|
||||||
|
### 3. Start TubeSync and check the logs
|
||||||
|
|
||||||
|
Once you start TubeSync with the new database connection you should see the folling log
|
||||||
|
entry in the container or stdout logs:
|
||||||
|
|
||||||
|
`2021-04-04 22:42:17,912 [tubesync/INFO] Using database connection: django.db.backends.postgresql://tubesync:[hidden]@localhost:5432/tubesync`
|
||||||
|
|
||||||
|
If you see a line similar to the above and the web interface loads, congratulations,
|
||||||
|
you are now using an external database server for your TubeSync data!
|
||||||
33
docs/reset-tasks.md
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
# TubeSync
|
||||||
|
|
||||||
|
## Advanced usage guide - reset tasks from the command line
|
||||||
|
|
||||||
|
This is a new feature in v1.0 of TubeSync and later. It allows you to reset all
|
||||||
|
scheduled tasks from the command line as well as the "reset tasks" button in the
|
||||||
|
"tasks" tab of the dashboard.
|
||||||
|
|
||||||
|
This is useful for TubeSync installations where you may have a lot of media and
|
||||||
|
sources added and the "reset tasks" button may take too long to the extent where
|
||||||
|
the page times out (with a 502 error or similar issue).
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
You have added some sources and media
|
||||||
|
|
||||||
|
## Steps
|
||||||
|
|
||||||
|
### 1. Run the reset tasks command
|
||||||
|
|
||||||
|
Execute the following Django command:
|
||||||
|
|
||||||
|
`./manage.py reset-tasks`
|
||||||
|
|
||||||
|
When deploying TubeSync inside a container, you can execute this with:
|
||||||
|
|
||||||
|
`docker exec -ti tubesync python3 /app/manage.py reset-tasks`
|
||||||
|
|
||||||
|
This command will log what its doing to the terminal when you run it.
|
||||||
|
|
||||||
|
When this is run, new tasks will be immediately created so all your sources will be
|
||||||
|
indexed again straight away, any missing information such as thumbnails will be
|
||||||
|
redownloaded, etc.
|
||||||
BIN
docs/source-v0.5.png
Normal file
|
After Width: | Height: | Size: 118 KiB |
BIN
docs/source.png
|
Before Width: | Height: | Size: 137 KiB |
BIN
docs/sources-v0.5.png
Normal file
|
After Width: | Height: | Size: 52 KiB |
BIN
docs/sources.png
|
Before Width: | Height: | Size: 51 KiB |
50
docs/using-cookies.md
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
# TubeSync
|
||||||
|
|
||||||
|
## Advanced usage guide - using exported cookies
|
||||||
|
|
||||||
|
This is a new feature in v0.10 of TubeSync and later. It allows you to use the cookies
|
||||||
|
file exported from your browser in "Netscape" format with TubeSync to authenticate
|
||||||
|
to YouTube. This can bypass some throttling, age restrictions and other blocks at
|
||||||
|
YouTube.
|
||||||
|
|
||||||
|
**IMPORTANT NOTE**: Using cookies exported from your browser that is authenticated
|
||||||
|
to YouTube identifes your Google account as using TubeSync. This may result in
|
||||||
|
potential account impacts and is entirely at your own risk. Do not use this
|
||||||
|
feature unless you really know what you're doing.
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
Have a browser that supports exporting your cookies and be logged into YouTube.
|
||||||
|
|
||||||
|
## Steps
|
||||||
|
|
||||||
|
### 1. Export your cookies
|
||||||
|
|
||||||
|
You need to export cookies for youtube.com from your browser, you can either do
|
||||||
|
this manually or there are plug-ins to automate this for you. This file must be
|
||||||
|
in the "Netscape" cookie export format.
|
||||||
|
|
||||||
|
Save your cookies as a `cookies.txt` file.
|
||||||
|
|
||||||
|
### 2. Import into TubeSync
|
||||||
|
|
||||||
|
Drop the `cookies.txt` file into your TubeSync `config` directory.
|
||||||
|
|
||||||
|
If detected correctly, you will see something like this in the worker or container
|
||||||
|
logs:
|
||||||
|
|
||||||
|
```
|
||||||
|
YYYY-MM-DD HH:MM:SS,mmm [tubesync/INFO] [youtube-dl] using cookies.txt from: /config/cookies.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
If you see that line it's working correctly.
|
||||||
|
|
||||||
|
If you see errors in your logs like this:
|
||||||
|
|
||||||
|
```
|
||||||
|
http.cookiejar.LoadError: '/config/cookies.txt' does not look like a Netscape format cookies file
|
||||||
|
```
|
||||||
|
|
||||||
|
Then your `cookies.txt` file was not generated or created correctly as it's not
|
||||||
|
in the required "Netscape" format. You can fix this by exporting your `cookies.txt`
|
||||||
|
in the correct "Netscape" format.
|
||||||
2
pip.conf
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
[global]
|
||||||
|
extra-index-url=https://www.piwheels.org/simple
|
||||||
@@ -1,10 +1,10 @@
|
|||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from .third_party_versions import youtube_dl_version, ffmpeg_version
|
from .third_party_versions import yt_dlp_version, ffmpeg_version
|
||||||
|
|
||||||
|
|
||||||
def app_details(request):
|
def app_details(request):
|
||||||
return {
|
return {
|
||||||
'app_version': str(settings.VERSION),
|
'app_version': str(settings.VERSION),
|
||||||
'youtube_dl_version': youtube_dl_version,
|
'yt_dlp_version': yt_dlp_version,
|
||||||
'ffmpeg_version': ffmpeg_version,
|
'ffmpeg_version': ffmpeg_version,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -20,3 +20,10 @@ class DownloadFailedException(Exception):
|
|||||||
exist.
|
exist.
|
||||||
'''
|
'''
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class DatabaseConnectionError(Exception):
|
||||||
|
'''
|
||||||
|
Raised when parsing or initially connecting to a database.
|
||||||
|
'''
|
||||||
|
pass
|
||||||
|
|||||||
@@ -1,4 +1,6 @@
|
|||||||
|
from django.conf import settings
|
||||||
from django.forms import BaseForm
|
from django.forms import BaseForm
|
||||||
|
from basicauth.middleware import BasicAuthMiddleware as BaseBasicAuthMiddleware
|
||||||
|
|
||||||
|
|
||||||
class MaterializeDefaultFieldsMiddleware:
|
class MaterializeDefaultFieldsMiddleware:
|
||||||
@@ -19,3 +21,12 @@ class MaterializeDefaultFieldsMiddleware:
|
|||||||
for _, field in v.fields.items():
|
for _, field in v.fields.items():
|
||||||
field.widget.attrs.update({'class':'browser-default'})
|
field.widget.attrs.update({'class':'browser-default'})
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
|
||||||
|
class BasicAuthMiddleware(BaseBasicAuthMiddleware):
|
||||||
|
|
||||||
|
def process_request(self, request):
|
||||||
|
bypass_uris = getattr(settings, 'BASICAUTH_ALWAYS_ALLOW_URIS', [])
|
||||||
|
if request.path in bypass_uris:
|
||||||
|
return None
|
||||||
|
return super().process_request(request)
|
||||||
|
|||||||
@@ -1,20 +1,20 @@
|
|||||||
@font-face {
|
@font-face {
|
||||||
font-family: 'roboto-light';
|
font-family: 'roboto-light';
|
||||||
src: url('/static/fonts/roboto/roboto-light.woff') format('woff');
|
src: url('../fonts/roboto/roboto-light.woff') format('woff');
|
||||||
font-weight: normal;
|
font-weight: normal;
|
||||||
font-style: normal;
|
font-style: normal;
|
||||||
}
|
}
|
||||||
|
|
||||||
@font-face {
|
@font-face {
|
||||||
font-family: 'roboto-regular';
|
font-family: 'roboto-regular';
|
||||||
src: url('/static/fonts/roboto/roboto-regular.woff') format('woff');
|
src: url('../fonts/roboto/roboto-regular.woff') format('woff');
|
||||||
font-weight: normal;
|
font-weight: normal;
|
||||||
font-style: normal;
|
font-style: normal;
|
||||||
}
|
}
|
||||||
|
|
||||||
@font-face {
|
@font-face {
|
||||||
font-family: 'roboto-bold';
|
font-family: 'roboto-bold';
|
||||||
src: url('/static/fonts/roboto/roboto-bold.woff') format('woff');
|
src: url('../fonts/roboto/roboto-bold.woff') format('woff');
|
||||||
font-weight: bold;
|
font-weight: bold;
|
||||||
font-style: normal;
|
font-style: normal;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
|
|||||||
}
|
}
|
||||||
.help-text {
|
.help-text {
|
||||||
color: $form-help-text-colour;
|
color: $form-help-text-colour;
|
||||||
padding: 1rem 0 1rem 0;
|
padding-bottom: 1rem;
|
||||||
}
|
}
|
||||||
label {
|
label {
|
||||||
text-transform: uppercase;
|
text-transform: uppercase;
|
||||||
|
|||||||
@@ -5,6 +5,13 @@ html {
|
|||||||
color: $text-colour;
|
color: $text-colour;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
display: flex;
|
||||||
|
min-height: 100vh;
|
||||||
|
flex-direction: column;
|
||||||
|
justify-content: space-between;
|
||||||
|
}
|
||||||
|
|
||||||
header {
|
header {
|
||||||
|
|
||||||
background-color: $header-background-colour;
|
background-color: $header-background-colour;
|
||||||
@@ -174,8 +181,10 @@ main {
|
|||||||
display: inline-block;
|
display: inline-block;
|
||||||
font-weight: bold;
|
font-weight: bold;
|
||||||
text-decoration: none;
|
text-decoration: none;
|
||||||
padding: 5px 10px 5px 10px;
|
padding: 5px 8px 4px 8px;
|
||||||
margin: 0 3px 0 3px;
|
margin: 0 3px 6px 3px;
|
||||||
|
min-width: 40px;
|
||||||
|
min-height: 40px;
|
||||||
background-color: $pagination-background-colour;
|
background-color: $pagination-background-colour;
|
||||||
color: $pagination-text-colour;
|
color: $pagination-text-colour;
|
||||||
border: 2px $pagination-border-colour solid;
|
border: 2px $pagination-border-colour solid;
|
||||||
|
|||||||
@@ -16,32 +16,36 @@
|
|||||||
|
|
||||||
<body>
|
<body>
|
||||||
|
|
||||||
<header>
|
<div class="app">
|
||||||
<div class="container">
|
|
||||||
<a href="{% url 'sync:dashboard' %}">
|
|
||||||
{% include 'tubesync.svg' with width='3rem' height='3rem' %}
|
|
||||||
<h1>TubeSync</h1>
|
|
||||||
</a>
|
|
||||||
</div>
|
|
||||||
</header>
|
|
||||||
|
|
||||||
<nav>
|
<header>
|
||||||
<div class="container">
|
<div class="container">
|
||||||
<ul>
|
<a href="{% url 'sync:dashboard' %}">
|
||||||
<li><a href="{% url 'sync:dashboard' %}"><i class="fas fa-fw fa-th-large"></i><span class="hide-on-med-and-down"> Dashboard</span></a></li>
|
{% include 'tubesync.svg' with width='3rem' height='3rem' %}
|
||||||
<li><a href="{% url 'sync:sources' %}"><i class="fas fa-fw fa-play"></i><span class="hide-on-med-and-down"> Sources</span></a></li>
|
<h1>TubeSync</h1>
|
||||||
<li><a href="{% url 'sync:media' %}"><i class="fas fa-fw fa-film"></i><span class="hide-on-med-and-down"> Media</span></a></li>
|
</a>
|
||||||
<li><a href="{% url 'sync:tasks' %}"><i class="far fa-fw fa-clock"></i><span class="hide-on-med-and-down"> Tasks</span></a></li>
|
</div>
|
||||||
<li><a href="{% url 'sync:mediaservers' %}"><i class="fas fa-fw fa-stream"></i><span class="hide-on-med-and-down"> Media Servers</span></a></li>
|
</header>
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</nav>
|
|
||||||
|
|
||||||
<main>
|
<nav>
|
||||||
<div class="container">
|
<div class="container">
|
||||||
{% block content %}{% endblock %}
|
<ul>
|
||||||
</div>
|
<li><a href="{% url 'sync:dashboard' %}"><i class="fas fa-fw fa-th-large"></i><span class="hide-on-med-and-down"> Dashboard</span></a></li>
|
||||||
</main>
|
<li><a href="{% url 'sync:sources' %}"><i class="fas fa-fw fa-play"></i><span class="hide-on-med-and-down"> Sources</span></a></li>
|
||||||
|
<li><a href="{% url 'sync:media' %}"><i class="fas fa-fw fa-film"></i><span class="hide-on-med-and-down"> Media</span></a></li>
|
||||||
|
<li><a href="{% url 'sync:tasks' %}"><i class="far fa-fw fa-clock"></i><span class="hide-on-med-and-down"> Tasks</span></a></li>
|
||||||
|
<li><a href="{% url 'sync:mediaservers' %}"><i class="fas fa-fw fa-stream"></i><span class="hide-on-med-and-down"> Media Servers</span></a></li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</nav>
|
||||||
|
|
||||||
|
<main>
|
||||||
|
<div class="container">
|
||||||
|
{% block content %}{% endblock %}
|
||||||
|
</div>
|
||||||
|
</main>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
<footer>
|
<footer>
|
||||||
<div class="container">
|
<div class="container">
|
||||||
@@ -53,7 +57,7 @@
|
|||||||
</p>
|
</p>
|
||||||
<p>
|
<p>
|
||||||
<a href="https://github.com/meeb/tubesync" class="nowrap" target="_blank"><i class="fab fa-github"></i> TubeSync</a> version <strong>{{ app_version }}</strong> with
|
<a href="https://github.com/meeb/tubesync" class="nowrap" target="_blank"><i class="fab fa-github"></i> TubeSync</a> version <strong>{{ app_version }}</strong> with
|
||||||
<a href="https://yt-dl.org/" class="nowrap" target="_blank"><i class="fas fa-link"></i> youtube-dl</a> version <strong>{{ youtube_dl_version }}</strong> and
|
<a href="https://github.com/yt-dlp/yt-dlp" class="nowrap" target="_blank"><i class="fas fa-link"></i> yt-dlp</a> version <strong>{{ yt_dlp_version }}</strong> and
|
||||||
<a href="https://ffmpeg.org/" class="nowrap" target="_blank"><i class="fas fa-link"></i> FFmpeg</a> version <strong>{{ ffmpeg_version }}</strong>.
|
<a href="https://ffmpeg.org/" class="nowrap" target="_blank"><i class="fas fa-link"></i> FFmpeg</a> version <strong>{{ ffmpeg_version }}</strong>.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
<div class="col s12">
|
<div class="col s12">
|
||||||
<div class="pagination">
|
<div class="pagination">
|
||||||
{% for i in paginator.page_range %}
|
{% for i in paginator.page_range %}
|
||||||
<a class="pagenum{% if i == page_obj.number %} currentpage{% endif %}" href="?{% if filter %}filter={{ filter }}&{% endif %}page={{ i }}">{{ i }}</a>
|
<a class="pagenum{% if i == page_obj.number %} currentpage{% endif %}" href="?{% if filter %}filter={{ filter }}&{% endif %}page={{ i }}{% if show_skipped %}&show_skipped=yes{% endif %}">{{ i }}</a>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -2,6 +2,8 @@ import os.path
|
|||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.test import TestCase, Client
|
from django.test import TestCase, Client
|
||||||
from .testutils import prevent_request_warnings
|
from .testutils import prevent_request_warnings
|
||||||
|
from .utils import parse_database_connection_string, clean_filename
|
||||||
|
from .errors import DatabaseConnectionError
|
||||||
|
|
||||||
|
|
||||||
class ErrorPageTestCase(TestCase):
|
class ErrorPageTestCase(TestCase):
|
||||||
@@ -61,3 +63,75 @@ class CommonStaticTestCase(TestCase):
|
|||||||
favicon_real_path = os.path.join(os.sep.join(root_parts),
|
favicon_real_path = os.path.join(os.sep.join(root_parts),
|
||||||
os.sep.join(url_parts))
|
os.sep.join(url_parts))
|
||||||
self.assertTrue(os.path.exists(favicon_real_path))
|
self.assertTrue(os.path.exists(favicon_real_path))
|
||||||
|
|
||||||
|
|
||||||
|
class UtilsTestCase(TestCase):
|
||||||
|
|
||||||
|
def test_parse_database_connection_string(self):
|
||||||
|
database_dict = parse_database_connection_string(
|
||||||
|
'postgresql://tubesync:password@localhost:5432/tubesync')
|
||||||
|
self.assertEqual(database_dict,
|
||||||
|
{
|
||||||
|
'DRIVER': 'postgresql',
|
||||||
|
'ENGINE': 'django.db.backends.postgresql',
|
||||||
|
'USER': 'tubesync',
|
||||||
|
'PASSWORD': 'password',
|
||||||
|
'HOST': 'localhost',
|
||||||
|
'PORT': 5432,
|
||||||
|
'NAME': 'tubesync',
|
||||||
|
'CONN_MAX_AGE': 300,
|
||||||
|
'OPTIONS': {},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
database_dict = parse_database_connection_string(
|
||||||
|
'mysql://tubesync:password@localhost:3306/tubesync')
|
||||||
|
self.assertEqual(database_dict,
|
||||||
|
{
|
||||||
|
'DRIVER': 'mysql',
|
||||||
|
'ENGINE': 'django.db.backends.mysql',
|
||||||
|
'USER': 'tubesync',
|
||||||
|
'PASSWORD': 'password',
|
||||||
|
'HOST': 'localhost',
|
||||||
|
'PORT': 3306,
|
||||||
|
'NAME': 'tubesync',
|
||||||
|
'CONN_MAX_AGE': 300,
|
||||||
|
'OPTIONS': {'charset': 'utf8mb4'}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
# Invalid driver
|
||||||
|
with self.assertRaises(DatabaseConnectionError):
|
||||||
|
parse_database_connection_string(
|
||||||
|
'test://tubesync:password@localhost:5432/tubesync')
|
||||||
|
# No username
|
||||||
|
with self.assertRaises(DatabaseConnectionError):
|
||||||
|
parse_database_connection_string(
|
||||||
|
'postgresql://password@localhost:5432/tubesync')
|
||||||
|
# No database name
|
||||||
|
with self.assertRaises(DatabaseConnectionError):
|
||||||
|
parse_database_connection_string(
|
||||||
|
'postgresql://tubesync:password@5432')
|
||||||
|
# Invalid port
|
||||||
|
with self.assertRaises(DatabaseConnectionError):
|
||||||
|
parse_database_connection_string(
|
||||||
|
'postgresql://tubesync:password@localhost:test/tubesync')
|
||||||
|
# Invalid port
|
||||||
|
with self.assertRaises(DatabaseConnectionError):
|
||||||
|
parse_database_connection_string(
|
||||||
|
'postgresql://tubesync:password@localhost:65537/tubesync')
|
||||||
|
# Invalid username or password
|
||||||
|
with self.assertRaises(DatabaseConnectionError):
|
||||||
|
parse_database_connection_string(
|
||||||
|
'postgresql://tubesync:password:test@localhost:5432/tubesync')
|
||||||
|
# Invalid database name
|
||||||
|
with self.assertRaises(DatabaseConnectionError):
|
||||||
|
parse_database_connection_string(
|
||||||
|
'postgresql://tubesync:password@localhost:5432/tubesync/test')
|
||||||
|
|
||||||
|
def test_clean_filename(self):
|
||||||
|
self.assertEqual(clean_filename('a'), 'a')
|
||||||
|
self.assertEqual(clean_filename('a\t'), 'a')
|
||||||
|
self.assertEqual(clean_filename('a\n'), 'a')
|
||||||
|
self.assertEqual(clean_filename('a a'), 'a a')
|
||||||
|
self.assertEqual(clean_filename('a a'), 'a a')
|
||||||
|
self.assertEqual(clean_filename('a\t\t\ta'), 'a a')
|
||||||
|
self.assertEqual(clean_filename('a\t\t\ta\t\t\t'), 'a a')
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
from youtube_dl import version as yt_version
|
from yt_dlp import version as yt_dlp_version
|
||||||
|
|
||||||
|
|
||||||
youtube_dl_version = str(yt_version.__version__)
|
yt_dlp_version = str(yt_dlp_version.__version__)
|
||||||
ffmpeg_version = '(shared install)'
|
ffmpeg_version = '(shared install)'
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,96 @@
|
|||||||
from urllib.parse import urlunsplit, urlencode
|
import string
|
||||||
|
from datetime import datetime
|
||||||
|
from urllib.parse import urlunsplit, urlencode, urlparse
|
||||||
|
from yt_dlp.utils import LazyList
|
||||||
|
from .errors import DatabaseConnectionError
|
||||||
|
|
||||||
|
|
||||||
|
def parse_database_connection_string(database_connection_string):
|
||||||
|
'''
|
||||||
|
Parses a connection string in a URL style format, such as:
|
||||||
|
postgresql://tubesync:password@localhost:5432/tubesync
|
||||||
|
mysql://someuser:somepassword@localhost:3306/tubesync
|
||||||
|
into a Django-compatible settings.DATABASES dict format.
|
||||||
|
'''
|
||||||
|
valid_drivers = ('postgresql', 'mysql')
|
||||||
|
default_ports = {
|
||||||
|
'postgresql': 5432,
|
||||||
|
'mysql': 3306,
|
||||||
|
}
|
||||||
|
django_backends = {
|
||||||
|
'postgresql': 'django.db.backends.postgresql',
|
||||||
|
'mysql': 'django.db.backends.mysql',
|
||||||
|
}
|
||||||
|
backend_options = {
|
||||||
|
'postgresql': {},
|
||||||
|
'mysql': {
|
||||||
|
'charset': 'utf8mb4',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
try:
|
||||||
|
parts = urlparse(str(database_connection_string))
|
||||||
|
except Exception as e:
|
||||||
|
raise DatabaseConnectionError(f'Failed to parse "{database_connection_string}" '
|
||||||
|
f'as a database connection string: {e}') from e
|
||||||
|
driver = parts.scheme
|
||||||
|
user_pass_host_port = parts.netloc
|
||||||
|
database = parts.path
|
||||||
|
if driver not in valid_drivers:
|
||||||
|
raise DatabaseConnectionError(f'Database connection string '
|
||||||
|
f'"{database_connection_string}" specified an '
|
||||||
|
f'invalid driver, must be one of {valid_drivers}')
|
||||||
|
django_driver = django_backends.get(driver)
|
||||||
|
host_parts = user_pass_host_port.split('@')
|
||||||
|
if len(host_parts) != 2:
|
||||||
|
raise DatabaseConnectionError(f'Database connection string netloc must be in '
|
||||||
|
f'the format of user:pass@host')
|
||||||
|
user_pass, host_port = host_parts
|
||||||
|
user_pass_parts = user_pass.split(':')
|
||||||
|
if len(user_pass_parts) != 2:
|
||||||
|
raise DatabaseConnectionError(f'Database connection string netloc must be in '
|
||||||
|
f'the format of user:pass@host')
|
||||||
|
username, password = user_pass_parts
|
||||||
|
host_port_parts = host_port.split(':')
|
||||||
|
if len(host_port_parts) == 1:
|
||||||
|
# No port number, assign a default port
|
||||||
|
hostname = host_port_parts[0]
|
||||||
|
port = default_ports.get(driver)
|
||||||
|
elif len(host_port_parts) == 2:
|
||||||
|
# Host name and port number
|
||||||
|
hostname, port = host_port_parts
|
||||||
|
try:
|
||||||
|
port = int(port)
|
||||||
|
except (ValueError, TypeError) as e:
|
||||||
|
raise DatabaseConnectionError(f'Database connection string contained an '
|
||||||
|
f'invalid port, ports must be integers: '
|
||||||
|
f'{e}') from e
|
||||||
|
if not 0 < port < 63336:
|
||||||
|
raise DatabaseConnectionError(f'Database connection string contained an '
|
||||||
|
f'invalid port, ports must be between 1 and '
|
||||||
|
f'65535, got {port}')
|
||||||
|
else:
|
||||||
|
# Malformed
|
||||||
|
raise DatabaseConnectionError(f'Database connection host must be a hostname or '
|
||||||
|
f'a hostname:port combination')
|
||||||
|
if database.startswith('/'):
|
||||||
|
database = database[1:]
|
||||||
|
if not database:
|
||||||
|
raise DatabaseConnectionError(f'Database connection string path must be a '
|
||||||
|
f'string in the format of /databasename')
|
||||||
|
if '/' in database:
|
||||||
|
raise DatabaseConnectionError(f'Database connection string path can only '
|
||||||
|
f'contain a single string name, got: {database}')
|
||||||
|
return {
|
||||||
|
'DRIVER': driver,
|
||||||
|
'ENGINE': django_driver,
|
||||||
|
'NAME': database,
|
||||||
|
'USER': username,
|
||||||
|
'PASSWORD': password,
|
||||||
|
'HOST': hostname,
|
||||||
|
'PORT': port,
|
||||||
|
'CONN_MAX_AGE': 300,
|
||||||
|
'OPTIONS': backend_options.get(driver),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def get_client_ip(request):
|
def get_client_ip(request):
|
||||||
@@ -14,3 +106,26 @@ def append_uri_params(uri, params):
|
|||||||
uri = str(uri)
|
uri = str(uri)
|
||||||
qs = urlencode(params)
|
qs = urlencode(params)
|
||||||
return urlunsplit(('', '', uri, qs, ''))
|
return urlunsplit(('', '', uri, qs, ''))
|
||||||
|
|
||||||
|
|
||||||
|
def clean_filename(filename):
|
||||||
|
if not isinstance(filename, str):
|
||||||
|
raise ValueError(f'filename must be a str, got {type(filename)}')
|
||||||
|
to_scrub = '<>\/:*?"|%'
|
||||||
|
for char in to_scrub:
|
||||||
|
filename = filename.replace(char, '')
|
||||||
|
clean_filename = ''
|
||||||
|
for c in filename:
|
||||||
|
if c in string.whitespace:
|
||||||
|
c = ' '
|
||||||
|
if ord(c) > 30:
|
||||||
|
clean_filename += c
|
||||||
|
return clean_filename.strip()
|
||||||
|
|
||||||
|
|
||||||
|
def json_serial(obj):
|
||||||
|
if isinstance(obj, datetime):
|
||||||
|
return obj.isoformat()
|
||||||
|
if isinstance(obj, LazyList):
|
||||||
|
return list(obj)
|
||||||
|
raise TypeError(f'Type {type(obj)} is not json_serial()-able')
|
||||||
|
|||||||
0
tubesync/healthcheck.py
Normal file → Executable file
@@ -7,7 +7,7 @@ class SourceAdmin(admin.ModelAdmin):
|
|||||||
|
|
||||||
ordering = ('-created',)
|
ordering = ('-created',)
|
||||||
list_display = ('uuid', 'name', 'source_type', 'last_crawl',
|
list_display = ('uuid', 'name', 'source_type', 'last_crawl',
|
||||||
'has_failed')
|
'download_media', 'has_failed')
|
||||||
readonly_fields = ('uuid', 'created')
|
readonly_fields = ('uuid', 'created')
|
||||||
search_fields = ('uuid', 'key', 'name')
|
search_fields = ('uuid', 'key', 'name')
|
||||||
|
|
||||||
|
|||||||
0
tubesync/sync/management/__init__.py
Normal file
0
tubesync/sync/management/commands/__init__.py
Normal file
51
tubesync/sync/management/commands/delete-source.py
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
import os
|
||||||
|
import uuid
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from django.db.models import signals
|
||||||
|
from common.logger import log
|
||||||
|
from sync.models import Source, Media, MediaServer
|
||||||
|
from sync.signals import media_post_delete
|
||||||
|
from sync.tasks import rescan_media_server
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    '''
        Management command that deletes a Source (and, via the model cascade,
        all of its Media) identified by UUID, then schedules a rescan on every
        configured media server.
    '''

    help = ('Deletes a source by UUID')

    def add_arguments(self, parser):
        # UUID (as a string) of the Source to delete
        parser.add_argument('--source', action='store', required=True, help='Source UUID')

    def handle(self, *args, **options):
        source_uuid_str = options.get('source', '')
        try:
            source_uuid = uuid.UUID(source_uuid_str)
        except Exception as e:
            raise CommandError(f'Failed to parse source UUID: {e}')
        log.info(f'Deleting source with UUID: {source_uuid}')
        # Fetch the source by UUID
        try:
            source = Source.objects.get(uuid=source_uuid)
        except Source.DoesNotExist:
            raise CommandError(f'Source does not exist with '
                               f'UUID: {source_uuid}')
        # Detach post-delete signal for Media so we don't spam media servers
        # with one rescan request per deleted media item; a single rescan is
        # scheduled manually below instead
        signals.post_delete.disconnect(media_post_delete, sender=Media)
        # Delete the source, triggering pre-delete signals for each media item
        log.info(f'Found source with UUID "{source.uuid}" with name '
                 f'"{source.name}" and deleting it, this may take some time!')
        source.delete()
        # Update any media servers
        for mediaserver in MediaServer.objects.all():
            log.info(f'Scheduling media server updates')
            verbose_name = _('Request media server rescan for "{}"')
            rescan_media_server(
                str(mediaserver.pk),
                priority=0,
                verbose_name=verbose_name.format(mediaserver),
                remove_existing_tasks=True
            )
        # Re-attach signals
        signals.post_delete.connect(media_post_delete, sender=Media)
        # All done
        log.info('Done')
|
||||||
55
tubesync/sync/management/commands/import-existing-media.py
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from common.logger import log
|
||||||
|
from sync.models import Source, Media
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    '''
        Management command that scans each Source's download directory for
        media files already on disk and marks the matching undownloaded,
        unskipped Media items as downloaded.
    '''

    # NOTE: this was previously a 2-tuple by accident (trailing comma after
    # the first string), which rendered as a tuple repr in --help output.
    help = ('Scans download media directories for media not yet downloaded and '
            'marks them as downloaded')
    # Extensions to look for in addition to Source.EXTENSIONS
    extra_extensions = ['mp3', 'mp4', 'avi']

    def handle(self, *args, **options):
        log.info('Building directory to Source map...')
        dirmap = {}
        for s in Source.objects.all():
            dirmap[s.directory_path] = s
        log.info('Scanning sources...')
        file_extensions = list(Source.EXTENSIONS) + self.extra_extensions
        for sourceroot, source in dirmap.items():
            media = list(Media.objects.filter(source=source, downloaded=False,
                                              skip=False))
            if not media:
                log.info(f'Source "{source}" has no missing media')
                continue
            log.info(f'Scanning Source "{source}" directory for media to '
                     f'import: {sourceroot}, looking for {len(media)} '
                     f'undownloaded and unskipped items')
            # Collect candidate media files on disk by extension
            on_disk = []
            for (root, dirs, files) in os.walk(sourceroot):
                rootpath = Path(root)
                for filename in files:
                    filepart, ext = os.path.splitext(filename)
                    if ext.startswith('.'):
                        ext = ext[1:]
                    ext = ext.strip().lower()
                    if ext not in file_extensions:
                        continue
                    on_disk.append(str(rootpath / filename))
            # Map on-disk paths to media items; the unique media key is
            # expected to appear somewhere in the file name on disk.
            # (A dead `continue` at the end of this inner loop was removed;
            # it had no effect.)
            filemap = {}
            for item in media:
                for filepath in on_disk:
                    if item.key in filepath:
                        filemap[filepath] = item
            for filepath, item in filemap.items():
                log.info(f'Matched on-disk file: {filepath} '
                         f'to media item: {item.source} / {item}')
                item.media_file.name = filepath
                item.downloaded = True
                item.save()
        log.info('Done')
|
||||||
15
tubesync/sync/management/commands/list-sources.py
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import os
|
||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from common.logger import log
|
||||||
|
from sync.models import Source, Media, MediaServer
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    '''
        Management command that logs every Source as "<uuid>: <name>".
    '''

    help = ('Lists sources')

    def handle(self, *args, **options):
        log.info('Listing sources...')
        sources = Source.objects.all()
        for src in sources:
            log.info(f' - {src.uuid}: {src.name}')
        log.info('Done')
|
||||||
33
tubesync/sync/management/commands/reset-tasks.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from background_task.models import Task
|
||||||
|
from sync.models import Source
|
||||||
|
from sync.tasks import index_source_task
|
||||||
|
|
||||||
|
|
||||||
|
from common.logger import log
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    '''
        Management command that deletes every queued background task and then
        recreates the initial indexing task for each Source. Saving each
        Source also chains down to re-save its Media objects.
    '''

    help = 'Resets all tasks'

    def handle(self, *args, **options):
        # Typo fix: this previously logged 'Resettings all tasks...'
        log.info('Resetting all tasks...')
        # Delete all tasks
        Task.objects.all().delete()
        # Iter all sources, recreating their indexing tasks
        for source in Source.objects.all():
            # Recreate the initial indexing task
            log.info(f'Resetting tasks for source: {source}')
            verbose_name = _('Index media from source "{}"')
            index_source_task(
                str(source.pk),
                repeat=source.index_schedule,
                queue=str(source.pk),
                priority=5,
                verbose_name=verbose_name.format(source.name)
            )
            # This also chains down to call each Media objects .save() as well
            source.save()
        log.info('Done')
|
||||||
34
tubesync/sync/management/commands/sync-missing-metadata.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
import os
|
||||||
|
from shutil import copyfile
|
||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from django.db.models import Q
|
||||||
|
from common.logger import log
|
||||||
|
from sync.models import Source, Media
|
||||||
|
from sync.utils import write_text_file
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    '''
        Management command that backfills missing metadata files (thumbnails
        and NFO files) for already-downloaded media, for sources where those
        options were enabled after media was downloaded.
    '''

    help = 'Syncs missing metadata (such as nfo files) if source settings are updated'

    def handle(self, *args, **options):
        log.info('Syncing missing metadata...')
        sources = Source.objects.filter(Q(copy_thumbnails=True) | Q(write_nfo=True))
        for source in sources.order_by('name'):
            log.info(f'Finding media for source: {source}')
            for item in Media.objects.filter(source=source, downloaded=True):
                log.info(f'Checking media for missing metadata: {source} / {item}')
                thumbpath = item.thumbpath
                # Guard on the source's own flag: previously thumbnails were
                # copied for any source in the queryset, even when only
                # write_nfo was enabled.
                if source.copy_thumbnails and not thumbpath.is_file():
                    if item.thumb:
                        log.info(f'Copying missing thumbnail from: {item.thumb.path} '
                                 f'to: {thumbpath}')
                        copyfile(item.thumb.path, thumbpath)
                    else:
                        log.error(f'Tried to copy missing thumbnail for {item} but '
                                  f'the thumbnail has not been downloaded')
                nfopath = item.nfopath
                # Likewise, only write NFO files when the source enables them
                if source.write_nfo and not nfopath.is_file():
                    log.info(f'Writing missing NFO file: {nfopath}')
                    write_text_file(nfopath, item.nfoxml)
        log.info('Done')
|
||||||
20
tubesync/sync/management/commands/youtube-dl-info.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import json
|
||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from sync.youtube import get_media_info
|
||||||
|
from common.utils import json_serial
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    '''
        Management command that fetches the media info for a URL and prints
        it to the console as pretty-printed, sorted JSON.
    '''

    help = 'Displays information obtained by youtube-dl in JSON to the console'

    def add_arguments(self, parser):
        # The media URL to look up
        parser.add_argument('url', type=str)

    def handle(self, *args, **options):
        url = options['url']
        self.stdout.write(f'Showing information for URL: {url}')
        media_info = get_media_info(url)
        rendered = json.dumps(media_info, indent=4, sort_keys=True,
                              default=json_serial)
        self.stdout.write(rendered)
        self.stdout.write('Done')
|
||||||
@@ -66,7 +66,7 @@ def get_best_audio_format(media):
|
|||||||
# No codecs matched
|
# No codecs matched
|
||||||
if media.source.can_fallback:
|
if media.source.can_fallback:
|
||||||
# Can fallback, find the next highest bitrate non-matching codec
|
# Can fallback, find the next highest bitrate non-matching codec
|
||||||
return False, audio_formats[0]
|
return False, audio_formats[0]['id']
|
||||||
else:
|
else:
|
||||||
# Can't fallback
|
# Can't fallback
|
||||||
return False, False
|
return False, False
|
||||||
|
|||||||
@@ -124,7 +124,7 @@ class PlexMediaServer(MediaServer):
|
|||||||
# Seems we have a valid library sections page, get the library IDs
|
# Seems we have a valid library sections page, get the library IDs
|
||||||
remote_libraries = {}
|
remote_libraries = {}
|
||||||
try:
|
try:
|
||||||
for parent in parsed_response.getiterator('MediaContainer'):
|
for parent in parsed_response.iter('MediaContainer'):
|
||||||
for d in parent:
|
for d in parent:
|
||||||
library_id = d.attrib['key']
|
library_id = d.attrib['key']
|
||||||
library_name = d.attrib['title']
|
library_name = d.attrib['title']
|
||||||
|
|||||||
18
tubesync/sync/migrations/0005_auto_20201219_0312.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2020-12-19 03:12
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds the 'YouTube channel by ID'
    # choice to Source.source_type. Do not edit by hand.

    dependencies = [
        ('sync', '0004_source_media_format'),
    ]

    operations = [
        migrations.AlterField(
            model_name='source',
            name='source_type',
            field=models.CharField(choices=[('c', 'YouTube channel'), ('i', 'YouTube channel by ID'), ('p', 'YouTube playlist')], db_index=True, default='c', help_text='Source type', max_length=1, verbose_name='source type'),
        ),
    ]
|
||||||
18
tubesync/sync/migrations/0006_source_write_nfo.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2020-12-19 03:12
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds the Source.write_nfo boolean.
    # Do not edit by hand.

    dependencies = [
        ('sync', '0005_auto_20201219_0312'),
    ]

    operations = [
        migrations.AddField(
            model_name='source',
            name='write_nfo',
            field=models.BooleanField(default=False, help_text='Write an NFO file with the media, these may be detected and used by some media servers', verbose_name='write nfo'),
        ),
    ]
|
||||||
18
tubesync/sync/migrations/0007_auto_20201219_0645.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2020-12-19 06:45
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated Django migration: updates the help text of
    # Source.write_nfo. Do not edit by hand.

    dependencies = [
        ('sync', '0006_source_write_nfo'),
    ]

    operations = [
        migrations.AlterField(
            model_name='source',
            name='write_nfo',
            field=models.BooleanField(default=False, help_text='Write an NFO file in XML with the media info, these may be detected and used by some media servers', verbose_name='write nfo'),
        ),
    ]
|
||||||
18
tubesync/sync/migrations/0008_source_download_cap.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2020-12-19 06:59
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds the Source.download_cap field
    # (age cap for media downloads, in seconds). Do not edit by hand.

    dependencies = [
        ('sync', '0007_auto_20201219_0645'),
    ]

    operations = [
        migrations.AddField(
            model_name='source',
            name='download_cap',
            field=models.IntegerField(choices=[(0, 'No cap'), (604800, '1 week (7 days)'), (2592000, '1 month (30 days)'), (7776000, '3 months (90 days)'), (15552000, '6 months (180 days)'), (31536000, '1 year (365 days)'), (63072000, '2 years (730 days)'), (94608000, '3 years (1095 days)'), (157680000, '5 years (1825 days)'), (315360000, '10 years (3650 days)')], default=0, help_text='Do not download media older than this capped date', verbose_name='download cap'),
        ),
    ]
|
||||||
30
tubesync/sync/migrations/0009_auto_20210218_0442.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
# Generated by Django 3.1.6 on 2021-02-18 04:42
|
||||||
|
|
||||||
|
import django.core.files.storage
|
||||||
|
from django.db import migrations, models
|
||||||
|
import sync.models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds Source.download_media and updates
    # Media.media_file and Source.media_format. Do not edit by hand.
    # NOTE(review): the FileSystemStorage location below is a hardcoded
    # absolute path baked in at generation time; storage kwargs are not part
    # of the migration state Django compares, so this is harmless but ugly.

    dependencies = [
        ('sync', '0008_source_download_cap'),
    ]

    operations = [
        migrations.AddField(
            model_name='source',
            name='download_media',
            field=models.BooleanField(default=True, help_text='Download media from this source, if not selected the source will only be indexed', verbose_name='download media'),
        ),
        migrations.AlterField(
            model_name='media',
            name='media_file',
            field=models.FileField(blank=True, help_text='Media file', max_length=200, null=True, storage=django.core.files.storage.FileSystemStorage(location='/home/meeb/Repos/github.com/meeb/tubesync/tubesync/downloads'), upload_to=sync.models.get_media_file_path, verbose_name='media file'),
        ),
        migrations.AlterField(
            model_name='source',
            name='media_format',
            field=models.CharField(default='{yyyymmdd}_{source}_{title}_{key}_{format}.{ext}', help_text='File format to use for saving files, detailed options at bottom of page.', max_length=200, verbose_name='media format'),
        ),
    ]
|
||||||
30
tubesync/sync/migrations/0010_auto_20210924_0554.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
# Generated by Django 3.2.7 on 2021-09-24 05:54
|
||||||
|
|
||||||
|
import django.core.files.storage
|
||||||
|
from django.db import migrations, models
|
||||||
|
import sync.models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated Django migration: widens Media.media_file, adds more
    # Source.index_schedule choices and changes the default media_format.
    # Do not edit by hand.

    dependencies = [
        ('sync', '0009_auto_20210218_0442'),
    ]

    operations = [
        migrations.AlterField(
            model_name='media',
            name='media_file',
            field=models.FileField(blank=True, help_text='Media file', max_length=255, null=True, storage=django.core.files.storage.FileSystemStorage(location='/home/meeb/Repos/github.com/meeb/tubesync/tubesync/downloads'), upload_to=sync.models.get_media_file_path, verbose_name='media file'),
        ),
        migrations.AlterField(
            model_name='source',
            name='index_schedule',
            field=models.IntegerField(choices=[(3600, 'Every hour'), (7200, 'Every 2 hours'), (10800, 'Every 3 hours'), (14400, 'Every 4 hours'), (18000, 'Every 5 hours'), (21600, 'Every 6 hours'), (43200, 'Every 12 hours'), (86400, 'Every 24 hours'), (259200, 'Every 3 days'), (604800, 'Every 7 days'), (0, 'Never')], db_index=True, default=86400, help_text='Schedule of how often to index the source for new media', verbose_name='index schedule'),
        ),
        migrations.AlterField(
            model_name='source',
            name='media_format',
            field=models.CharField(default='{yyyy_mm_dd}_{source}_{title}_{key}_{format}.{ext}', help_text='File format to use for saving files, detailed options at bottom of page.', max_length=200, verbose_name='media format'),
        ),
    ]
|
||||||
21
tubesync/sync/migrations/0011_auto_20220201_1654.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
# Generated by Django 3.2.11 on 2022-02-01 16:54
|
||||||
|
|
||||||
|
import django.core.files.storage
|
||||||
|
from django.db import migrations, models
|
||||||
|
import sync.models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds the Source.write_json boolean.
    # Do not edit by hand.

    dependencies = [
        ('sync', '0010_auto_20210924_0554'),
    ]

    operations = [
        migrations.AddField(
            model_name='source',
            name='write_json',
            field=models.BooleanField(
                default=False, help_text='Write a JSON file with the media info, these may be detected and used by some media servers', verbose_name='write json'),
        ),
    ]
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.2.12 on 2022-04-06 06:19
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated Django migration: makes Media.downloaded_format
    # nullable/blank with a 30-char limit. Do not edit by hand.

    dependencies = [
        ('sync', '0011_auto_20220201_1654'),
    ]

    operations = [
        migrations.AlterField(
            model_name='media',
            name='downloaded_format',
            field=models.CharField(blank=True, help_text='Video format (resolution) of the downloaded media', max_length=30, null=True, verbose_name='downloaded format'),
        ),
    ]
|
||||||
25
tubesync/sync/migrations/0013_fix_elative_media_file.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
# Generated by Django 3.2.12 on 2022-04-06 06:19
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
def fix_media_file(apps, schema_editor):
    # Data migration: strip the absolute DOWNLOAD_ROOT prefix from
    # Media.media_file names so paths are stored relative to the storage
    # root rather than as absolute paths.
    Media = apps.get_model('sync', 'Media')
    for media in Media.objects.filter(downloaded=True):
        download_dir = str(settings.DOWNLOAD_ROOT)

        if media.media_file.name.startswith(download_dir):
            # The +1 also removes the path separator after the root prefix
            media.media_file.name = media.media_file.name[len(download_dir) + 1:]
            media.save()
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated Django data migration: runs fix_media_file to rewrite
    # absolute media_file paths as relative ones. Do not edit by hand.
    # NOTE: no reverse function is supplied, so this migration is
    # irreversible.

    dependencies = [
        ('sync', '0012_alter_media_downloaded_format'),
    ]

    operations = [
        migrations.RunPython(fix_media_file)
    ]
|
||||||
21
tubesync/sync/migrations/0014_alter_media_media_file.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
# Generated by Django 3.2.15 on 2022-12-28 20:33
|
||||||
|
|
||||||
|
import django.core.files.storage
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
import sync.models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated Django migration: points Media.media_file storage at
    # settings.DOWNLOAD_ROOT (replacing the previously hardcoded absolute
    # path) and serves it under /media-data/. Do not edit by hand.

    dependencies = [
        ('sync', '0013_fix_elative_media_file'),
    ]

    operations = [
        migrations.AlterField(
            model_name='media',
            name='media_file',
            field=models.FileField(blank=True, help_text='Media file', max_length=255, null=True, storage=django.core.files.storage.FileSystemStorage(base_url='/media-data/', location=str(settings.DOWNLOAD_ROOT)), upload_to=sync.models.get_media_file_path, verbose_name='media file'),
        ),
    ]
|
||||||
@@ -1,7 +1,9 @@
|
|||||||
import os
|
import os
|
||||||
import uuid
|
import uuid
|
||||||
import json
|
import json
|
||||||
from datetime import datetime
|
from xml.etree import ElementTree
|
||||||
|
from collections import OrderedDict
|
||||||
|
from datetime import datetime, timedelta
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.db import models
|
from django.db import models
|
||||||
@@ -10,6 +12,7 @@ from django.utils.text import slugify
|
|||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from common.errors import NoFormatException
|
from common.errors import NoFormatException
|
||||||
|
from common.utils import clean_filename
|
||||||
from .youtube import (get_media_info as get_youtube_media_info,
|
from .youtube import (get_media_info as get_youtube_media_info,
|
||||||
download_media as download_youtube_media)
|
download_media as download_youtube_media)
|
||||||
from .utils import seconds_to_timestr, parse_media_format
|
from .utils import seconds_to_timestr, parse_media_format
|
||||||
@@ -18,7 +21,7 @@ from .matching import (get_best_combined_format, get_best_audio_format,
|
|||||||
from .mediaservers import PlexMediaServer
|
from .mediaservers import PlexMediaServer
|
||||||
|
|
||||||
|
|
||||||
media_file_storage = FileSystemStorage(location=str(settings.DOWNLOAD_ROOT))
|
media_file_storage = FileSystemStorage(location=str(settings.DOWNLOAD_ROOT), base_url='/media-data/')
|
||||||
|
|
||||||
|
|
||||||
class Source(models.Model):
|
class Source(models.Model):
|
||||||
@@ -28,10 +31,13 @@ class Source(models.Model):
|
|||||||
'''
|
'''
|
||||||
|
|
||||||
SOURCE_TYPE_YOUTUBE_CHANNEL = 'c'
|
SOURCE_TYPE_YOUTUBE_CHANNEL = 'c'
|
||||||
|
SOURCE_TYPE_YOUTUBE_CHANNEL_ID = 'i'
|
||||||
SOURCE_TYPE_YOUTUBE_PLAYLIST = 'p'
|
SOURCE_TYPE_YOUTUBE_PLAYLIST = 'p'
|
||||||
SOURCE_TYPES = (SOURCE_TYPE_YOUTUBE_CHANNEL, SOURCE_TYPE_YOUTUBE_PLAYLIST)
|
SOURCE_TYPES = (SOURCE_TYPE_YOUTUBE_CHANNEL, SOURCE_TYPE_YOUTUBE_CHANNEL_ID,
|
||||||
|
SOURCE_TYPE_YOUTUBE_PLAYLIST)
|
||||||
SOURCE_TYPE_CHOICES = (
|
SOURCE_TYPE_CHOICES = (
|
||||||
(SOURCE_TYPE_YOUTUBE_CHANNEL, _('YouTube channel')),
|
(SOURCE_TYPE_YOUTUBE_CHANNEL, _('YouTube channel')),
|
||||||
|
(SOURCE_TYPE_YOUTUBE_CHANNEL_ID, _('YouTube channel by ID')),
|
||||||
(SOURCE_TYPE_YOUTUBE_PLAYLIST, _('YouTube playlist')),
|
(SOURCE_TYPE_YOUTUBE_PLAYLIST, _('YouTube playlist')),
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -95,27 +101,54 @@ class Source(models.Model):
|
|||||||
(FALLBACK_NEXT_BEST_HD, _('Get next best resolution but at least HD'))
|
(FALLBACK_NEXT_BEST_HD, _('Get next best resolution but at least HD'))
|
||||||
)
|
)
|
||||||
|
|
||||||
|
EXTENSION_M4A = 'm4a'
|
||||||
|
EXTENSION_OGG = 'ogg'
|
||||||
|
EXTENSION_MKV = 'mkv'
|
||||||
|
EXTENSIONS = (EXTENSION_M4A, EXTENSION_OGG, EXTENSION_MKV)
|
||||||
|
|
||||||
# Fontawesome icons used for the source on the front end
|
# Fontawesome icons used for the source on the front end
|
||||||
ICONS = {
|
ICONS = {
|
||||||
SOURCE_TYPE_YOUTUBE_CHANNEL: '<i class="fab fa-youtube"></i>',
|
SOURCE_TYPE_YOUTUBE_CHANNEL: '<i class="fab fa-youtube"></i>',
|
||||||
|
SOURCE_TYPE_YOUTUBE_CHANNEL_ID: '<i class="fab fa-youtube"></i>',
|
||||||
SOURCE_TYPE_YOUTUBE_PLAYLIST: '<i class="fab fa-youtube"></i>',
|
SOURCE_TYPE_YOUTUBE_PLAYLIST: '<i class="fab fa-youtube"></i>',
|
||||||
}
|
}
|
||||||
# Format to use to display a URL for the source
|
# Format to use to display a URL for the source
|
||||||
URLS = {
|
URLS = {
|
||||||
SOURCE_TYPE_YOUTUBE_CHANNEL: 'https://www.youtube.com/c/{key}',
|
SOURCE_TYPE_YOUTUBE_CHANNEL: 'https://www.youtube.com/c/{key}',
|
||||||
|
SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'https://www.youtube.com/channel/{key}',
|
||||||
|
SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/playlist?list={key}',
|
||||||
|
}
|
||||||
|
# Format used to create indexable URLs
|
||||||
|
INDEX_URLS = {
|
||||||
|
SOURCE_TYPE_YOUTUBE_CHANNEL: 'https://www.youtube.com/c/{key}/videos',
|
||||||
|
SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'https://www.youtube.com/channel/{key}/videos',
|
||||||
SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/playlist?list={key}',
|
SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/playlist?list={key}',
|
||||||
}
|
}
|
||||||
# Callback functions to get a list of media from the source
|
# Callback functions to get a list of media from the source
|
||||||
INDEXERS = {
|
INDEXERS = {
|
||||||
SOURCE_TYPE_YOUTUBE_CHANNEL: get_youtube_media_info,
|
SOURCE_TYPE_YOUTUBE_CHANNEL: get_youtube_media_info,
|
||||||
|
SOURCE_TYPE_YOUTUBE_CHANNEL_ID: get_youtube_media_info,
|
||||||
SOURCE_TYPE_YOUTUBE_PLAYLIST: get_youtube_media_info,
|
SOURCE_TYPE_YOUTUBE_PLAYLIST: get_youtube_media_info,
|
||||||
}
|
}
|
||||||
# Field names to find the media ID used as the key when storing media
|
# Field names to find the media ID used as the key when storing media
|
||||||
KEY_FIELD = {
|
KEY_FIELD = {
|
||||||
SOURCE_TYPE_YOUTUBE_CHANNEL: 'id',
|
SOURCE_TYPE_YOUTUBE_CHANNEL: 'id',
|
||||||
|
SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'id',
|
||||||
SOURCE_TYPE_YOUTUBE_PLAYLIST: 'id',
|
SOURCE_TYPE_YOUTUBE_PLAYLIST: 'id',
|
||||||
}
|
}
|
||||||
|
|
||||||
|
class CapChoices(models.IntegerChoices):
|
||||||
|
CAP_NOCAP = 0, _('No cap')
|
||||||
|
CAP_7DAYS = 604800, _('1 week (7 days)')
|
||||||
|
CAP_30DAYS = 2592000, _('1 month (30 days)')
|
||||||
|
CAP_90DAYS = 7776000, _('3 months (90 days)')
|
||||||
|
CAP_6MONTHS = 15552000, _('6 months (180 days)')
|
||||||
|
CAP_1YEAR = 31536000, _('1 year (365 days)')
|
||||||
|
CAP_2YEARs = 63072000, _('2 years (730 days)')
|
||||||
|
CAP_3YEARs = 94608000, _('3 years (1095 days)')
|
||||||
|
CAP_5YEARs = 157680000, _('5 years (1825 days)')
|
||||||
|
CAP_10YEARS = 315360000, _('10 years (3650 days)')
|
||||||
|
|
||||||
class IndexSchedule(models.IntegerChoices):
|
class IndexSchedule(models.IntegerChoices):
|
||||||
EVERY_HOUR = 3600, _('Every hour')
|
EVERY_HOUR = 3600, _('Every hour')
|
||||||
EVERY_2_HOURS = 7200, _('Every 2 hours')
|
EVERY_2_HOURS = 7200, _('Every 2 hours')
|
||||||
@@ -125,6 +158,9 @@ class Source(models.Model):
|
|||||||
EVERY_6_HOURS = 21600, _('Every 6 hours')
|
EVERY_6_HOURS = 21600, _('Every 6 hours')
|
||||||
EVERY_12_HOURS = 43200, _('Every 12 hours')
|
EVERY_12_HOURS = 43200, _('Every 12 hours')
|
||||||
EVERY_24_HOURS = 86400, _('Every 24 hours')
|
EVERY_24_HOURS = 86400, _('Every 24 hours')
|
||||||
|
EVERY_3_DAYS = 259200, _('Every 3 days')
|
||||||
|
EVERY_7_DAYS = 604800, _('Every 7 days')
|
||||||
|
NEVER = 0, _('Never')
|
||||||
|
|
||||||
uuid = models.UUIDField(
|
uuid = models.UUIDField(
|
||||||
_('uuid'),
|
_('uuid'),
|
||||||
@@ -179,15 +215,26 @@ class Source(models.Model):
|
|||||||
_('media format'),
|
_('media format'),
|
||||||
max_length=200,
|
max_length=200,
|
||||||
default=settings.MEDIA_FORMATSTR_DEFAULT,
|
default=settings.MEDIA_FORMATSTR_DEFAULT,
|
||||||
help_text=_('File format to use for saving files')
|
help_text=_('File format to use for saving files, detailed options at bottom of page.')
|
||||||
)
|
)
|
||||||
index_schedule = models.IntegerField(
|
index_schedule = models.IntegerField(
|
||||||
_('index schedule'),
|
_('index schedule'),
|
||||||
choices=IndexSchedule.choices,
|
choices=IndexSchedule.choices,
|
||||||
db_index=True,
|
db_index=True,
|
||||||
default=IndexSchedule.EVERY_6_HOURS,
|
default=IndexSchedule.EVERY_24_HOURS,
|
||||||
help_text=_('Schedule of how often to index the source for new media')
|
help_text=_('Schedule of how often to index the source for new media')
|
||||||
)
|
)
|
||||||
|
download_media = models.BooleanField(
|
||||||
|
_('download media'),
|
||||||
|
default=True,
|
||||||
|
help_text=_('Download media from this source, if not selected the source will only be indexed')
|
||||||
|
)
|
||||||
|
download_cap = models.IntegerField(
|
||||||
|
_('download cap'),
|
||||||
|
choices=CapChoices.choices,
|
||||||
|
default=CapChoices.CAP_NOCAP,
|
||||||
|
help_text=_('Do not download media older than this capped date')
|
||||||
|
)
|
||||||
delete_old_media = models.BooleanField(
|
delete_old_media = models.BooleanField(
|
||||||
_('delete old media'),
|
_('delete old media'),
|
||||||
default=False,
|
default=False,
|
||||||
@@ -246,6 +293,16 @@ class Source(models.Model):
|
|||||||
default=False,
|
default=False,
|
||||||
help_text=_('Copy thumbnails with the media, these may be detected and used by some media servers')
|
help_text=_('Copy thumbnails with the media, these may be detected and used by some media servers')
|
||||||
)
|
)
|
||||||
|
write_nfo = models.BooleanField(
|
||||||
|
_('write nfo'),
|
||||||
|
default=False,
|
||||||
|
help_text=_('Write an NFO file in XML with the media info, these may be detected and used by some media servers')
|
||||||
|
)
|
||||||
|
write_json = models.BooleanField(
|
||||||
|
_('write json'),
|
||||||
|
default=False,
|
||||||
|
help_text=_('Write a JSON file with the media info, these may be detected and used by some media servers')
|
||||||
|
)
|
||||||
has_failed = models.BooleanField(
|
has_failed = models.BooleanField(
|
||||||
_('has failed'),
|
_('has failed'),
|
||||||
default=False,
|
default=False,
|
||||||
@@ -276,6 +333,14 @@ class Source(models.Model):
|
|||||||
def is_video(self):
|
def is_video(self):
|
||||||
return not self.is_audio
|
return not self.is_audio
|
||||||
|
|
||||||
|
@property
|
||||||
|
def download_cap_date(self):
|
||||||
|
delta = self.download_cap
|
||||||
|
if delta > 0:
|
||||||
|
return timezone.now() - timedelta(seconds=delta)
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def extension(self):
|
def extension(self):
|
||||||
'''
|
'''
|
||||||
@@ -287,23 +352,32 @@ class Source(models.Model):
|
|||||||
'''
|
'''
|
||||||
if self.is_audio:
|
if self.is_audio:
|
||||||
if self.source_acodec == self.SOURCE_ACODEC_MP4A:
|
if self.source_acodec == self.SOURCE_ACODEC_MP4A:
|
||||||
return 'm4a'
|
return self.EXTENSION_M4A
|
||||||
elif self.source_acodec == self.SOURCE_ACODEC_OPUS:
|
elif self.source_acodec == self.SOURCE_ACODEC_OPUS:
|
||||||
return 'ogg'
|
return self.EXTENSION_OGG
|
||||||
else:
|
else:
|
||||||
raise ValueError('Unable to choose audio extension, uknown acodec')
|
raise ValueError('Unable to choose audio extension, uknown acodec')
|
||||||
else:
|
else:
|
||||||
return 'mkv'
|
return self.EXTENSION_MKV
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create_url(obj, source_type, key):
|
def create_url(obj, source_type, key):
|
||||||
url = obj.URLS.get(source_type)
|
url = obj.URLS.get(source_type)
|
||||||
return url.format(key=key)
|
return url.format(key=key)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def create_index_url(obj, source_type, key):
|
||||||
|
url = obj.INDEX_URLS.get(source_type)
|
||||||
|
return url.format(key=key)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def url(self):
|
def url(self):
|
||||||
return Source.create_url(self.source_type, self.key)
|
return Source.create_url(self.source_type, self.key)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def index_url(self):
|
||||||
|
return Source.create_index_url(self.source_type, self.key)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def format_summary(self):
|
def format_summary(self):
|
||||||
if self.source_resolution == Source.SOURCE_RESOLUTION_AUDIO:
|
if self.source_resolution == Source.SOURCE_RESOLUTION_AUDIO:
|
||||||
@@ -318,10 +392,14 @@ class Source(models.Model):
|
|||||||
@property
|
@property
|
||||||
def directory_path(self):
|
def directory_path(self):
|
||||||
download_dir = Path(media_file_storage.location)
|
download_dir = Path(media_file_storage.location)
|
||||||
|
return download_dir / self.type_directory_path
|
||||||
|
|
||||||
|
@property
|
||||||
|
def type_directory_path(self):
|
||||||
if self.source_resolution == self.SOURCE_RESOLUTION_AUDIO:
|
if self.source_resolution == self.SOURCE_RESOLUTION_AUDIO:
|
||||||
return download_dir / settings.DOWNLOAD_AUDIO_DIR / self.directory
|
return Path(settings.DOWNLOAD_AUDIO_DIR) / self.directory
|
||||||
else:
|
else:
|
||||||
return download_dir / settings.DOWNLOAD_VIDEO_DIR / self.directory
|
return Path(settings.DOWNLOAD_VIDEO_DIR) / self.directory
|
||||||
|
|
||||||
def make_directory(self):
|
def make_directory(self):
|
||||||
return os.makedirs(self.directory_path, exist_ok=True)
|
return os.makedirs(self.directory_path, exist_ok=True)
|
||||||
@@ -359,16 +437,20 @@ class Source(models.Model):
|
|||||||
fmt.append('60fps')
|
fmt.append('60fps')
|
||||||
if self.prefer_hdr:
|
if self.prefer_hdr:
|
||||||
fmt.append('hdr')
|
fmt.append('hdr')
|
||||||
|
now = timezone.now()
|
||||||
return {
|
return {
|
||||||
'yyyymmdd': timezone.now().strftime('%Y%m%d'),
|
'yyyymmdd': now.strftime('%Y%m%d'),
|
||||||
'yyyy_mm_dd': timezone.now().strftime('%Y-%m-%d'),
|
'yyyy_mm_dd': now.strftime('%Y-%m-%d'),
|
||||||
'yyyy': timezone.now().strftime('%Y'),
|
'yyyy': now.strftime('%Y'),
|
||||||
|
'mm': now.strftime('%m'),
|
||||||
|
'dd': now.strftime('%d'),
|
||||||
'source': self.slugname,
|
'source': self.slugname,
|
||||||
'source_full': self.name,
|
'source_full': self.name,
|
||||||
'title': 'some-media-title-name',
|
'title': 'some-media-title-name',
|
||||||
'title_full': 'Some Media Title Name',
|
'title_full': 'Some Media Title Name',
|
||||||
'key': 'SoMeUnIqUiD',
|
'key': 'SoMeUnIqUiD',
|
||||||
'format': '-'.join(fmt),
|
'format': '-'.join(fmt),
|
||||||
|
'playlist_title': 'Some Playlist Title',
|
||||||
'ext': self.extension,
|
'ext': self.extension,
|
||||||
'resolution': self.source_resolution if self.source_resolution else '',
|
'resolution': self.source_resolution if self.source_resolution else '',
|
||||||
'height': '720' if self.source_resolution else '',
|
'height': '720' if self.source_resolution else '',
|
||||||
@@ -382,7 +464,7 @@ class Source(models.Model):
|
|||||||
def get_example_media_format(self):
|
def get_example_media_format(self):
|
||||||
try:
|
try:
|
||||||
return self.media_format.format(**self.example_media_format_dict)
|
return self.media_format.format(**self.example_media_format_dict)
|
||||||
except Exception:
|
except Exception as e:
|
||||||
return ''
|
return ''
|
||||||
|
|
||||||
def index_media(self):
|
def index_media(self):
|
||||||
@@ -392,25 +474,14 @@ class Source(models.Model):
|
|||||||
indexer = self.INDEXERS.get(self.source_type, None)
|
indexer = self.INDEXERS.get(self.source_type, None)
|
||||||
if not callable(indexer):
|
if not callable(indexer):
|
||||||
raise Exception(f'Source type f"{self.source_type}" has no indexer')
|
raise Exception(f'Source type f"{self.source_type}" has no indexer')
|
||||||
response = indexer(self.url)
|
response = indexer(self.index_url)
|
||||||
|
if not isinstance(response, dict):
|
||||||
|
return []
|
||||||
|
entries = response.get('entries', [])
|
||||||
|
|
||||||
# Account for nested playlists, such as a channel of playlists of playlists
|
if settings.MAX_ENTRIES_PROCESSING:
|
||||||
def _recurse_playlists(playlist):
|
entries = entries[:settings.MAX_ENTRIES_PROCESSING]
|
||||||
videos = []
|
return entries
|
||||||
if not playlist:
|
|
||||||
return videos
|
|
||||||
entries = playlist.get('entries', [])
|
|
||||||
for entry in entries:
|
|
||||||
if not entry:
|
|
||||||
continue
|
|
||||||
subentries = entry.get('entries', [])
|
|
||||||
if subentries:
|
|
||||||
videos = videos + _recurse_playlists(entry)
|
|
||||||
else:
|
|
||||||
videos.append(entry)
|
|
||||||
return videos
|
|
||||||
|
|
||||||
return _recurse_playlists(response)
|
|
||||||
|
|
||||||
|
|
||||||
def get_media_thumb_path(instance, filename):
|
def get_media_thumb_path(instance, filename):
|
||||||
@@ -433,47 +504,99 @@ class Media(models.Model):
|
|||||||
# Format to use to display a URL for the media
|
# Format to use to display a URL for the media
|
||||||
URLS = {
|
URLS = {
|
||||||
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'https://www.youtube.com/watch?v={key}',
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'https://www.youtube.com/watch?v={key}',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'https://www.youtube.com/watch?v={key}',
|
||||||
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/watch?v={key}',
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/watch?v={key}',
|
||||||
}
|
}
|
||||||
|
# Callback functions to get a list of media from the source
|
||||||
|
INDEXERS = {
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: get_youtube_media_info,
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: get_youtube_media_info,
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: get_youtube_media_info,
|
||||||
|
}
|
||||||
# Maps standardised names to names used in source metdata
|
# Maps standardised names to names used in source metdata
|
||||||
METADATA_FIELDS = {
|
METADATA_FIELDS = {
|
||||||
'upload_date': {
|
'upload_date': {
|
||||||
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'upload_date',
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'upload_date',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'upload_date',
|
||||||
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'upload_date',
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'upload_date',
|
||||||
},
|
},
|
||||||
'title': {
|
'title': {
|
||||||
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'title',
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'title',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'title',
|
||||||
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'title',
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'title',
|
||||||
},
|
},
|
||||||
'thumbnail': {
|
'thumbnail': {
|
||||||
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'thumbnail',
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'thumbnail',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'thumbnail',
|
||||||
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'thumbnail',
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'thumbnail',
|
||||||
},
|
},
|
||||||
'description': {
|
'description': {
|
||||||
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'description',
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'description',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'description',
|
||||||
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'description',
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'description',
|
||||||
},
|
},
|
||||||
'duration': {
|
'duration': {
|
||||||
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'duration',
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'duration',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'duration',
|
||||||
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'duration',
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'duration',
|
||||||
},
|
},
|
||||||
'formats': {
|
'formats': {
|
||||||
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'formats',
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'formats',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'formats',
|
||||||
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'formats',
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'formats',
|
||||||
}
|
},
|
||||||
|
'categories': {
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'categories',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'categories',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'categories',
|
||||||
|
},
|
||||||
|
'rating': {
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'average_rating',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'average_rating',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'average_rating',
|
||||||
|
},
|
||||||
|
'age_limit': {
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'age_limit',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'age_limit',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'age_limit',
|
||||||
|
},
|
||||||
|
'uploader': {
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'uploader',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'uploader',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'uploader',
|
||||||
|
},
|
||||||
|
'upvotes': {
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'like_count',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'like_count',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'like_count',
|
||||||
|
},
|
||||||
|
'downvotes': {
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'dislike_count',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'dislike_count',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'dislike_count',
|
||||||
|
},
|
||||||
|
'playlist_title': {
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL: 'playlist_title',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'playlist_title',
|
||||||
|
Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'playlist_title',
|
||||||
|
},
|
||||||
}
|
}
|
||||||
STATE_UNKNOWN = 'unknown'
|
STATE_UNKNOWN = 'unknown'
|
||||||
STATE_SCHEDULED = 'scheduled'
|
STATE_SCHEDULED = 'scheduled'
|
||||||
STATE_DOWNLOADING = 'downloading'
|
STATE_DOWNLOADING = 'downloading'
|
||||||
STATE_DOWNLOADED = 'downloaded'
|
STATE_DOWNLOADED = 'downloaded'
|
||||||
|
STATE_SKIPPED = 'skipped'
|
||||||
|
STATE_DISABLED_AT_SOURCE = 'source-disabled'
|
||||||
STATE_ERROR = 'error'
|
STATE_ERROR = 'error'
|
||||||
STATES = (STATE_UNKNOWN, STATE_SCHEDULED, STATE_DOWNLOADING, STATE_DOWNLOADED,
|
STATES = (STATE_UNKNOWN, STATE_SCHEDULED, STATE_DOWNLOADING, STATE_DOWNLOADED,
|
||||||
STATE_ERROR)
|
STATE_SKIPPED, STATE_DISABLED_AT_SOURCE, STATE_ERROR)
|
||||||
STATE_ICONS = {
|
STATE_ICONS = {
|
||||||
STATE_UNKNOWN: '<i class="far fa-question-circle" title="Unknown download state"></i>',
|
STATE_UNKNOWN: '<i class="far fa-question-circle" title="Unknown download state"></i>',
|
||||||
STATE_SCHEDULED: '<i class="far fa-clock" title="Scheduled to download"></i>',
|
STATE_SCHEDULED: '<i class="far fa-clock" title="Scheduled to download"></i>',
|
||||||
STATE_DOWNLOADING: '<i class="fas fa-download" title="Downloading now"></i>',
|
STATE_DOWNLOADING: '<i class="fas fa-download" title="Downloading now"></i>',
|
||||||
STATE_DOWNLOADED: '<i class="far fa-check-circle" title="Downloaded"></i>',
|
STATE_DOWNLOADED: '<i class="far fa-check-circle" title="Downloaded"></i>',
|
||||||
|
STATE_SKIPPED: '<i class="fas fa-exclamation-circle" title="Skipped"></i>',
|
||||||
|
STATE_DISABLED_AT_SOURCE: '<i class="fas fa-stop-circle" title="Media downloading disabled at source"></i>',
|
||||||
STATE_ERROR: '<i class="fas fa-exclamation-triangle" title="Error downloading"></i>',
|
STATE_ERROR: '<i class="fas fa-exclamation-triangle" title="Error downloading"></i>',
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -546,7 +669,7 @@ class Media(models.Model):
|
|||||||
media_file = models.FileField(
|
media_file = models.FileField(
|
||||||
_('media file'),
|
_('media file'),
|
||||||
upload_to=get_media_file_path,
|
upload_to=get_media_file_path,
|
||||||
max_length=200,
|
max_length=255,
|
||||||
blank=True,
|
blank=True,
|
||||||
null=True,
|
null=True,
|
||||||
storage=media_file_storage,
|
storage=media_file_storage,
|
||||||
@@ -576,7 +699,7 @@ class Media(models.Model):
|
|||||||
max_length=30,
|
max_length=30,
|
||||||
blank=True,
|
blank=True,
|
||||||
null=True,
|
null=True,
|
||||||
help_text=_('Audio codec of the downloaded media')
|
help_text=_('Video format (resolution) of the downloaded media')
|
||||||
)
|
)
|
||||||
downloaded_height = models.PositiveIntegerField(
|
downloaded_height = models.PositiveIntegerField(
|
||||||
_('downloaded height'),
|
_('downloaded height'),
|
||||||
@@ -703,7 +826,24 @@ class Media(models.Model):
|
|||||||
hdr = ''
|
hdr = ''
|
||||||
# If the download has completed use existing values
|
# If the download has completed use existing values
|
||||||
if self.downloaded:
|
if self.downloaded:
|
||||||
resolution = f'{self.downloaded_height}p'
|
# Check if there's any stored meta data at all
|
||||||
|
if (not self.downloaded_video_codec and \
|
||||||
|
not self.downloaded_audio_codec):
|
||||||
|
# Marked as downloaded but no metadata, imported?
|
||||||
|
return {
|
||||||
|
'resolution': resolution,
|
||||||
|
'height': height,
|
||||||
|
'width': width,
|
||||||
|
'vcodec': vcodec,
|
||||||
|
'acodec': acodec,
|
||||||
|
'fps': fps,
|
||||||
|
'hdr': hdr,
|
||||||
|
'format': tuple(fmt),
|
||||||
|
}
|
||||||
|
if self.downloaded_format:
|
||||||
|
resolution = self.downloaded_format.lower()
|
||||||
|
elif self.downloaded_height:
|
||||||
|
resolution = f'{self.downloaded_height}p'
|
||||||
if self.downloaded_format != 'audio':
|
if self.downloaded_format != 'audio':
|
||||||
vcodec = self.downloaded_video_codec.lower()
|
vcodec = self.downloaded_video_codec.lower()
|
||||||
fmt.append(vcodec)
|
fmt.append(vcodec)
|
||||||
@@ -730,7 +870,7 @@ class Media(models.Model):
|
|||||||
# Otherwise, calculate from matched format codes
|
# Otherwise, calculate from matched format codes
|
||||||
vformat = None
|
vformat = None
|
||||||
aformat = None
|
aformat = None
|
||||||
if '+' in format_str:
|
if format_str and '+' in format_str:
|
||||||
# Seperate audio and video streams
|
# Seperate audio and video streams
|
||||||
vformat_code, aformat_code = format_str.split('+')
|
vformat_code, aformat_code = format_str.split('+')
|
||||||
vformat = self.get_format_by_code(vformat_code)
|
vformat = self.get_format_by_code(vformat_code)
|
||||||
@@ -739,7 +879,7 @@ class Media(models.Model):
|
|||||||
# Combined stream or audio only
|
# Combined stream or audio only
|
||||||
cformat = self.get_format_by_code(format_str)
|
cformat = self.get_format_by_code(format_str)
|
||||||
aformat = cformat
|
aformat = cformat
|
||||||
if cformat['vcodec']:
|
if cformat and cformat['vcodec']:
|
||||||
# Combined
|
# Combined
|
||||||
vformat = cformat
|
vformat = cformat
|
||||||
if vformat:
|
if vformat:
|
||||||
@@ -747,8 +887,9 @@ class Media(models.Model):
|
|||||||
fmt.append(resolution)
|
fmt.append(resolution)
|
||||||
vcodec = vformat['vcodec'].lower()
|
vcodec = vformat['vcodec'].lower()
|
||||||
fmt.append(vcodec)
|
fmt.append(vcodec)
|
||||||
acodec = aformat['acodec'].lower()
|
if aformat:
|
||||||
fmt.append(acodec)
|
acodec = aformat['acodec'].lower()
|
||||||
|
fmt.append(acodec)
|
||||||
if vformat:
|
if vformat:
|
||||||
if vformat['is_60fps']:
|
if vformat['is_60fps']:
|
||||||
fps = '60fps'
|
fps = '60fps'
|
||||||
@@ -791,12 +932,15 @@ class Media(models.Model):
|
|||||||
'yyyymmdd': dateobj.strftime('%Y%m%d'),
|
'yyyymmdd': dateobj.strftime('%Y%m%d'),
|
||||||
'yyyy_mm_dd': dateobj.strftime('%Y-%m-%d'),
|
'yyyy_mm_dd': dateobj.strftime('%Y-%m-%d'),
|
||||||
'yyyy': dateobj.strftime('%Y'),
|
'yyyy': dateobj.strftime('%Y'),
|
||||||
|
'mm': dateobj.strftime('%m'),
|
||||||
|
'dd': dateobj.strftime('%d'),
|
||||||
'source': self.source.slugname,
|
'source': self.source.slugname,
|
||||||
'source_full': self.source.name,
|
'source_full': self.source.name,
|
||||||
'title': self.slugtitle,
|
'title': self.slugtitle,
|
||||||
'title_full': self.title,
|
'title_full': clean_filename(self.title),
|
||||||
'key': self.key,
|
'key': self.key,
|
||||||
'format': '-'.join(display_format['format']),
|
'format': '-'.join(display_format['format']),
|
||||||
|
'playlist_title': self.playlist_title,
|
||||||
'ext': self.source.extension,
|
'ext': self.source.extension,
|
||||||
'resolution': display_format['resolution'],
|
'resolution': display_format['resolution'],
|
||||||
'height': display_format['height'],
|
'height': display_format['height'],
|
||||||
@@ -807,10 +951,17 @@ class Media(models.Model):
|
|||||||
'hdr': display_format['hdr'],
|
'hdr': display_format['hdr'],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def has_metadata(self):
|
||||||
|
return self.metadata is not None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def loaded_metadata(self):
|
def loaded_metadata(self):
|
||||||
try:
|
try:
|
||||||
return json.loads(self.metadata)
|
data = json.loads(self.metadata)
|
||||||
|
if not isinstance(data, dict):
|
||||||
|
return {}
|
||||||
|
return data
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
@@ -847,7 +998,10 @@ class Media(models.Model):
|
|||||||
@property
|
@property
|
||||||
def upload_date(self):
|
def upload_date(self):
|
||||||
field = self.get_metadata_field('upload_date')
|
field = self.get_metadata_field('upload_date')
|
||||||
upload_date_str = self.loaded_metadata.get(field, '').strip()
|
try:
|
||||||
|
upload_date_str = self.loaded_metadata.get(field, '').strip()
|
||||||
|
except (AttributeError, ValueError) as e:
|
||||||
|
return None
|
||||||
try:
|
try:
|
||||||
return datetime.strptime(upload_date_str, '%Y%m%d')
|
return datetime.strptime(upload_date_str, '%Y%m%d')
|
||||||
except (AttributeError, ValueError) as e:
|
except (AttributeError, ValueError) as e:
|
||||||
@@ -856,7 +1010,12 @@ class Media(models.Model):
|
|||||||
@property
|
@property
|
||||||
def duration(self):
|
def duration(self):
|
||||||
field = self.get_metadata_field('duration')
|
field = self.get_metadata_field('duration')
|
||||||
return int(self.loaded_metadata.get(field, 0))
|
duration = self.loaded_metadata.get(field, 0)
|
||||||
|
try:
|
||||||
|
duration = int(duration)
|
||||||
|
except ValueError:
|
||||||
|
duration = 0
|
||||||
|
return duration
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def duration_formatted(self):
|
def duration_formatted(self):
|
||||||
@@ -865,26 +1024,96 @@ class Media(models.Model):
|
|||||||
return seconds_to_timestr(duration)
|
return seconds_to_timestr(duration)
|
||||||
return '??:??:??'
|
return '??:??:??'
|
||||||
|
|
||||||
|
@property
|
||||||
|
def categories(self):
|
||||||
|
field = self.get_metadata_field('categories')
|
||||||
|
return self.loaded_metadata.get(field, [])
|
||||||
|
|
||||||
|
@property
|
||||||
|
def rating(self):
|
||||||
|
field = self.get_metadata_field('rating')
|
||||||
|
return self.loaded_metadata.get(field, 0)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def votes(self):
|
||||||
|
field = self.get_metadata_field('upvotes')
|
||||||
|
upvotes = self.loaded_metadata.get(field, 0)
|
||||||
|
if not isinstance(upvotes, int):
|
||||||
|
upvotes = 0
|
||||||
|
field = self.get_metadata_field('downvotes')
|
||||||
|
downvotes = self.loaded_metadata.get(field, 0)
|
||||||
|
if not isinstance(downvotes, int):
|
||||||
|
downvotes = 0
|
||||||
|
return upvotes + downvotes
|
||||||
|
|
||||||
|
@property
|
||||||
|
def age_limit(self):
|
||||||
|
field = self.get_metadata_field('age_limit')
|
||||||
|
return self.loaded_metadata.get(field, 0)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def uploader(self):
|
||||||
|
field = self.get_metadata_field('uploader')
|
||||||
|
return self.loaded_metadata.get(field, '')
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def formats(self):
|
def formats(self):
|
||||||
field = self.get_metadata_field('formats')
|
field = self.get_metadata_field('formats')
|
||||||
return self.loaded_metadata.get(field, [])
|
return self.loaded_metadata.get(field, [])
|
||||||
|
|
||||||
|
@property
|
||||||
|
def playlist_title(self):
|
||||||
|
field = self.get_metadata_field('playlist_title')
|
||||||
|
return self.loaded_metadata.get(field, '')
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def filename(self):
|
def filename(self):
|
||||||
# If a media_file has been downloaded use its existing name
|
# Create a suitable filename from the source media_format
|
||||||
if self.media_file:
|
|
||||||
return os.path.basename(self.media_file.name)
|
|
||||||
# Otherwise, create a suitable filename from the source media_format
|
|
||||||
media_format = str(self.source.media_format)
|
media_format = str(self.source.media_format)
|
||||||
media_details = self.format_dict
|
media_details = self.format_dict
|
||||||
return media_format.format(**media_details)
|
return media_format.format(**media_details)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def thumbname(self):
|
||||||
|
if self.downloaded and self.media_file:
|
||||||
|
filename = os.path.basename(self.media_file.path)
|
||||||
|
else:
|
||||||
|
filename = self.filename
|
||||||
|
prefix, ext = os.path.splitext(filename)
|
||||||
|
return f'{prefix}.jpg'
|
||||||
|
|
||||||
|
@property
|
||||||
|
def thumbpath(self):
|
||||||
|
return self.source.directory_path / self.thumbname
|
||||||
|
|
||||||
|
@property
|
||||||
|
def nfoname(self):
|
||||||
|
if self.downloaded and self.media_file:
|
||||||
|
filename = os.path.basename(self.media_file.path)
|
||||||
|
else:
|
||||||
|
filename = self.filename
|
||||||
|
prefix, ext = os.path.splitext(filename)
|
||||||
|
return f'{prefix}.nfo'
|
||||||
|
|
||||||
|
@property
|
||||||
|
def nfopath(self):
|
||||||
|
return self.source.directory_path / self.nfoname
|
||||||
|
|
||||||
|
@property
|
||||||
|
def jsonname(self):
|
||||||
|
if self.downloaded and self.media_file:
|
||||||
|
filename = os.path.basename(self.media_file.path)
|
||||||
|
else:
|
||||||
|
filename = self.filename
|
||||||
|
prefix, ext = os.path.splitext(filename)
|
||||||
|
return f'{prefix}.info.json'
|
||||||
|
|
||||||
|
@property
|
||||||
|
def jsonpath(self):
|
||||||
|
return self.source.directory_path / self.jsonname
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def directory_path(self):
|
def directory_path(self):
|
||||||
# If a media_file has been downloaded use its existing directory
|
|
||||||
if self.media_file:
|
|
||||||
return os.path.dirname(self.media_file.name)
|
|
||||||
# Otherwise, create a suitable filename from the source media_format
|
# Otherwise, create a suitable filename from the source media_format
|
||||||
media_format = str(self.source.media_format)
|
media_format = str(self.source.media_format)
|
||||||
media_details = self.format_dict
|
media_details = self.format_dict
|
||||||
@@ -907,6 +1136,103 @@ class Media(models.Model):
|
|||||||
return False
|
return False
|
||||||
return os.path.exists(self.media_file.path)
|
return os.path.exists(self.media_file.path)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def nfoxml(self):
|
||||||
|
'''
|
||||||
|
Returns an NFO formatted (prettified) XML string.
|
||||||
|
'''
|
||||||
|
nfo = ElementTree.Element('episodedetails')
|
||||||
|
nfo.text = '\n '
|
||||||
|
# title = media metadata title
|
||||||
|
title = nfo.makeelement('title', {})
|
||||||
|
title.text = str(self.name).strip()
|
||||||
|
title.tail = '\n '
|
||||||
|
nfo.append(title)
|
||||||
|
# showtitle = source name
|
||||||
|
showtitle = nfo.makeelement('showtitle', {})
|
||||||
|
showtitle.text = str(self.source.name).strip()
|
||||||
|
showtitle.tail = '\n '
|
||||||
|
nfo.append(showtitle)
|
||||||
|
# ratings = media metadata youtube rating
|
||||||
|
value = nfo.makeelement('value', {})
|
||||||
|
value.text = str(self.rating)
|
||||||
|
value.tail = '\n '
|
||||||
|
votes = nfo.makeelement('votes', {})
|
||||||
|
votes.text = str(self.votes)
|
||||||
|
votes.tail = '\n '
|
||||||
|
rating_attrs = OrderedDict()
|
||||||
|
rating_attrs['name'] = 'youtube'
|
||||||
|
rating_attrs['max'] = '5'
|
||||||
|
rating_attrs['default'] = 'True'
|
||||||
|
rating = nfo.makeelement('rating', rating_attrs)
|
||||||
|
rating.text = '\n '
|
||||||
|
rating.append(value)
|
||||||
|
rating.append(votes)
|
||||||
|
rating.tail = '\n '
|
||||||
|
ratings = nfo.makeelement('ratings', {})
|
||||||
|
ratings.text = '\n '
|
||||||
|
ratings.append(rating)
|
||||||
|
ratings.tail = '\n '
|
||||||
|
nfo.append(ratings)
|
||||||
|
# plot = media metadata description
|
||||||
|
plot = nfo.makeelement('plot', {})
|
||||||
|
plot.text = str(self.description).strip()
|
||||||
|
plot.tail = '\n '
|
||||||
|
nfo.append(plot)
|
||||||
|
# thumb = local path to media thumbnail
|
||||||
|
thumb = nfo.makeelement('thumb', {})
|
||||||
|
thumb.text = self.thumbname if self.source.copy_thumbnails else ''
|
||||||
|
thumb.tail = '\n '
|
||||||
|
nfo.append(thumb)
|
||||||
|
# mpaa = media metadata age requirement
|
||||||
|
mpaa = nfo.makeelement('mpaa', {})
|
||||||
|
mpaa.text = str(self.age_limit)
|
||||||
|
mpaa.tail = '\n '
|
||||||
|
nfo.append(mpaa)
|
||||||
|
# runtime = media metadata duration in seconds
|
||||||
|
runtime = nfo.makeelement('runtime', {})
|
||||||
|
runtime.text = str(self.duration)
|
||||||
|
runtime.tail = '\n '
|
||||||
|
nfo.append(runtime)
|
||||||
|
# id = media key
|
||||||
|
idn = nfo.makeelement('id', {})
|
||||||
|
idn.text = str(self.key).strip()
|
||||||
|
idn.tail = '\n '
|
||||||
|
nfo.append(idn)
|
||||||
|
# uniqueid = media key
|
||||||
|
uniqueid_attrs = OrderedDict()
|
||||||
|
uniqueid_attrs['type'] = 'youtube'
|
||||||
|
uniqueid_attrs['default'] = 'True'
|
||||||
|
uniqueid = nfo.makeelement('uniqueid', uniqueid_attrs)
|
||||||
|
uniqueid.text = str(self.key).strip()
|
||||||
|
uniqueid.tail = '\n '
|
||||||
|
nfo.append(uniqueid)
|
||||||
|
# studio = media metadata uploader
|
||||||
|
studio = nfo.makeelement('studio', {})
|
||||||
|
studio.text = str(self.uploader).strip()
|
||||||
|
studio.tail = '\n '
|
||||||
|
nfo.append(studio)
|
||||||
|
# aired = media metadata uploaded date
|
||||||
|
aired = nfo.makeelement('aired', {})
|
||||||
|
upload_date = self.upload_date
|
||||||
|
aired.text = upload_date.strftime('%Y-%m-%d') if upload_date else ''
|
||||||
|
aired.tail = '\n '
|
||||||
|
nfo.append(aired)
|
||||||
|
# dateadded = date and time media was created in tubesync
|
||||||
|
dateadded = nfo.makeelement('dateadded', {})
|
||||||
|
dateadded.text = self.created.strftime('%Y-%m-%d %H:%M:%S')
|
||||||
|
dateadded.tail = '\n '
|
||||||
|
nfo.append(dateadded)
|
||||||
|
# genre = any media metadata categories if they exist
|
||||||
|
for category_str in self.categories:
|
||||||
|
genre = nfo.makeelement('genre', {})
|
||||||
|
genre.text = str(category_str).strip()
|
||||||
|
genre.tail = '\n '
|
||||||
|
nfo.append(genre)
|
||||||
|
nfo[-1].tail = '\n'
|
||||||
|
# Return XML tree as a prettified string
|
||||||
|
return ElementTree.tostring(nfo, encoding='utf8', method='xml').decode('utf8')
|
||||||
|
|
||||||
def get_download_state(self, task=None):
|
def get_download_state(self, task=None):
|
||||||
if self.downloaded:
|
if self.downloaded:
|
||||||
return self.STATE_DOWNLOADED
|
return self.STATE_DOWNLOADED
|
||||||
@@ -917,6 +1243,10 @@ class Media(models.Model):
|
|||||||
return self.STATE_ERROR
|
return self.STATE_ERROR
|
||||||
else:
|
else:
|
||||||
return self.STATE_SCHEDULED
|
return self.STATE_SCHEDULED
|
||||||
|
if self.skip:
|
||||||
|
return self.STATE_SKIPPED
|
||||||
|
if not self.source.download_media:
|
||||||
|
return self.STATE_DISABLED_AT_SOURCE
|
||||||
return self.STATE_UNKNOWN
|
return self.STATE_UNKNOWN
|
||||||
|
|
||||||
def get_download_state_icon(self, task=None):
|
def get_download_state_icon(self, task=None):
|
||||||
@@ -930,10 +1260,20 @@ class Media(models.Model):
|
|||||||
f'no valid format available')
|
f'no valid format available')
|
||||||
# Download the media with youtube-dl
|
# Download the media with youtube-dl
|
||||||
download_youtube_media(self.url, format_str, self.source.extension,
|
download_youtube_media(self.url, format_str, self.source.extension,
|
||||||
str(self.filepath))
|
str(self.filepath), self.source.write_json)
|
||||||
# Return the download paramaters
|
# Return the download paramaters
|
||||||
return format_str, self.source.extension
|
return format_str, self.source.extension
|
||||||
|
|
||||||
|
def index_metadata(self):
|
||||||
|
'''
|
||||||
|
Index the media metadata returning a dict of info.
|
||||||
|
'''
|
||||||
|
indexer = self.INDEXERS.get(self.source.source_type, None)
|
||||||
|
if not callable(indexer):
|
||||||
|
raise Exception(f'Media with source type f"{self.source.source_type}" '
|
||||||
|
f'has no indexer')
|
||||||
|
return indexer(self.url)
|
||||||
|
|
||||||
|
|
||||||
class MediaServer(models.Model):
|
class MediaServer(models.Model):
|
||||||
'''
|
'''
|
||||||
|
|||||||
@@ -8,8 +8,9 @@ from background_task.models import Task
|
|||||||
from common.logger import log
|
from common.logger import log
|
||||||
from .models import Source, Media, MediaServer
|
from .models import Source, Media, MediaServer
|
||||||
from .tasks import (delete_task_by_source, delete_task_by_media, index_source_task,
|
from .tasks import (delete_task_by_source, delete_task_by_media, index_source_task,
|
||||||
download_media_thumbnail, map_task_to_instance,
|
download_media_thumbnail, download_media_metadata,
|
||||||
check_source_directory_exists, download_media, rescan_media_server)
|
map_task_to_instance, check_source_directory_exists,
|
||||||
|
download_media, rescan_media_server)
|
||||||
from .utils import delete_file
|
from .utils import delete_file
|
||||||
|
|
||||||
|
|
||||||
@@ -46,17 +47,18 @@ def source_post_save(sender, instance, created, **kwargs):
|
|||||||
priority=0,
|
priority=0,
|
||||||
verbose_name=verbose_name.format(instance.name)
|
verbose_name=verbose_name.format(instance.name)
|
||||||
)
|
)
|
||||||
delete_task_by_source('sync.tasks.index_source_task', instance.pk)
|
if instance.index_schedule > 0:
|
||||||
log.info(f'Scheduling media indexing for source: {instance.name}')
|
delete_task_by_source('sync.tasks.index_source_task', instance.pk)
|
||||||
verbose_name = _('Index media from source "{}"')
|
log.info(f'Scheduling media indexing for source: {instance.name}')
|
||||||
index_source_task(
|
verbose_name = _('Index media from source "{}"')
|
||||||
str(instance.pk),
|
index_source_task(
|
||||||
repeat=instance.index_schedule,
|
str(instance.pk),
|
||||||
queue=str(instance.pk),
|
repeat=instance.index_schedule,
|
||||||
priority=5,
|
queue=str(instance.pk),
|
||||||
verbose_name=verbose_name.format(instance.name),
|
priority=5,
|
||||||
remove_existing_tasks=True
|
verbose_name=verbose_name.format(instance.name),
|
||||||
)
|
remove_existing_tasks=True
|
||||||
|
)
|
||||||
# Trigger the post_save signal for each media item linked to this source as various
|
# Trigger the post_save signal for each media item linked to this source as various
|
||||||
# flags may need to be recalculated
|
# flags may need to be recalculated
|
||||||
for media in Media.objects.filter(source=instance):
|
for media in Media.objects.filter(source=instance):
|
||||||
@@ -91,18 +93,69 @@ def task_task_failed(sender, task_id, completed_task, **kwargs):
|
|||||||
|
|
||||||
@receiver(post_save, sender=Media)
|
@receiver(post_save, sender=Media)
|
||||||
def media_post_save(sender, instance, created, **kwargs):
|
def media_post_save(sender, instance, created, **kwargs):
|
||||||
# Triggered after media is saved, Recalculate the "can_download" flag, this may
|
# Triggered after media is saved
|
||||||
|
cap_changed = False
|
||||||
|
can_download_changed = False
|
||||||
|
# Reset the skip flag if the download cap has changed if the media has not
|
||||||
|
# already been downloaded
|
||||||
|
if not instance.downloaded:
|
||||||
|
max_cap_age = instance.source.download_cap_date
|
||||||
|
published = instance.published
|
||||||
|
if not published:
|
||||||
|
if not instance.skip:
|
||||||
|
log.warn(f'Media: {instance.source} / {instance} has no published date '
|
||||||
|
f'set, marking to be skipped')
|
||||||
|
instance.skip = True
|
||||||
|
cap_changed = True
|
||||||
|
else:
|
||||||
|
log.debug(f'Media: {instance.source} / {instance} has no published date '
|
||||||
|
f'set but is already marked to be skipped')
|
||||||
|
else:
|
||||||
|
if max_cap_age:
|
||||||
|
if published > max_cap_age and instance.skip:
|
||||||
|
# Media was published after the cap date but is set to be skipped
|
||||||
|
log.info(f'Media: {instance.source} / {instance} has a valid '
|
||||||
|
f'publishing date, marking to be unskipped')
|
||||||
|
instance.skip = False
|
||||||
|
cap_changed = True
|
||||||
|
elif published <= max_cap_age and not instance.skip:
|
||||||
|
log.info(f'Media: {instance.source} / {instance} is too old for '
|
||||||
|
f'the download cap date, marking to be skipped')
|
||||||
|
instance.skip = True
|
||||||
|
cap_changed = True
|
||||||
|
else:
|
||||||
|
if instance.skip:
|
||||||
|
# Media marked to be skipped but source download cap removed
|
||||||
|
log.info(f'Media: {instance.source} / {instance} has a valid '
|
||||||
|
f'publishing date, marking to be unskipped')
|
||||||
|
instance.skip = False
|
||||||
|
cap_changed = True
|
||||||
|
# Recalculate the "can_download" flag, this may
|
||||||
# need to change if the source specifications have been changed
|
# need to change if the source specifications have been changed
|
||||||
post_save.disconnect(media_post_save, sender=Media)
|
if instance.metadata:
|
||||||
if instance.get_format_str():
|
if instance.get_format_str():
|
||||||
if not instance.can_download:
|
if not instance.can_download:
|
||||||
instance.can_download = True
|
instance.can_download = True
|
||||||
instance.save()
|
can_download_changed = True
|
||||||
else:
|
else:
|
||||||
if instance.can_download:
|
if instance.can_download:
|
||||||
instance.can_download = False
|
instance.can_download = False
|
||||||
instance.save()
|
can_download_changed = True
|
||||||
post_save.connect(media_post_save, sender=Media)
|
# Save the instance if any changes were required
|
||||||
|
if cap_changed or can_download_changed:
|
||||||
|
post_save.disconnect(media_post_save, sender=Media)
|
||||||
|
instance.save()
|
||||||
|
post_save.connect(media_post_save, sender=Media)
|
||||||
|
# If the media is missing metadata schedule it to be downloaded
|
||||||
|
if not instance.metadata:
|
||||||
|
log.info(f'Scheduling task to download metadata for: {instance.url}')
|
||||||
|
verbose_name = _('Downloading metadata for "{}"')
|
||||||
|
download_media_metadata(
|
||||||
|
str(instance.pk),
|
||||||
|
priority=10,
|
||||||
|
verbose_name=verbose_name.format(instance.pk),
|
||||||
|
remove_existing_tasks=True
|
||||||
|
)
|
||||||
# If the media is missing a thumbnail schedule it to be downloaded
|
# If the media is missing a thumbnail schedule it to be downloaded
|
||||||
if not instance.thumb_file_exists:
|
if not instance.thumb_file_exists:
|
||||||
instance.thumb = None
|
instance.thumb = None
|
||||||
@@ -124,7 +177,8 @@ def media_post_save(sender, instance, created, **kwargs):
|
|||||||
if not instance.media_file_exists:
|
if not instance.media_file_exists:
|
||||||
instance.downloaded = False
|
instance.downloaded = False
|
||||||
instance.media_file = None
|
instance.media_file = None
|
||||||
if not instance.downloaded and instance.can_download and not instance.skip:
|
if (not instance.downloaded and instance.can_download and not instance.skip
|
||||||
|
and instance.source.download_media):
|
||||||
delete_task_by_media('sync.tasks.download_media', (str(instance.pk),))
|
delete_task_by_media('sync.tasks.download_media', (str(instance.pk),))
|
||||||
verbose_name = _('Downloading media for "{}"')
|
verbose_name = _('Downloading media for "{}"')
|
||||||
download_media(
|
download_media(
|
||||||
@@ -145,20 +199,6 @@ def media_pre_delete(sender, instance, **kwargs):
|
|||||||
if thumbnail_url:
|
if thumbnail_url:
|
||||||
delete_task_by_media('sync.tasks.download_media_thumbnail',
|
delete_task_by_media('sync.tasks.download_media_thumbnail',
|
||||||
(str(instance.pk), thumbnail_url))
|
(str(instance.pk), thumbnail_url))
|
||||||
# Delete media thumbnail if it exists
|
|
||||||
if instance.thumb:
|
|
||||||
log.info(f'Deleting thumbnail for: {instance} path: {instance.thumb.path}')
|
|
||||||
delete_file(instance.thumb.path)
|
|
||||||
# Delete the media file if it exists
|
|
||||||
if instance.media_file:
|
|
||||||
filepath = instance.media_file.path
|
|
||||||
log.info(f'Deleting media for: {instance} path: {filepath}')
|
|
||||||
delete_file(filepath)
|
|
||||||
# Delete thumbnail copy if it exists
|
|
||||||
barefilepath, fileext = os.path.splitext(filepath)
|
|
||||||
thumbpath = f'{barefilepath}.jpg'
|
|
||||||
log.info(f'Deleting thumbnail for: {instance} path: {thumbpath}')
|
|
||||||
delete_file(thumbpath)
|
|
||||||
|
|
||||||
|
|
||||||
@receiver(post_delete, sender=Media)
|
@receiver(post_delete, sender=Media)
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ import math
|
|||||||
import uuid
|
import uuid
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
from hashlib import sha1
|
from hashlib import sha1
|
||||||
from datetime import timedelta
|
from datetime import timedelta, datetime
|
||||||
from shutil import copyfile
|
from shutil import copyfile
|
||||||
from PIL import Image
|
from PIL import Image
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
@@ -22,8 +22,10 @@ from background_task import background
|
|||||||
from background_task.models import Task, CompletedTask
|
from background_task.models import Task, CompletedTask
|
||||||
from common.logger import log
|
from common.logger import log
|
||||||
from common.errors import NoMediaException, DownloadFailedException
|
from common.errors import NoMediaException, DownloadFailedException
|
||||||
|
from common.utils import json_serial
|
||||||
from .models import Source, Media, MediaServer
|
from .models import Source, Media, MediaServer
|
||||||
from .utils import get_remote_image, resize_image_to_height, delete_file
|
from .utils import (get_remote_image, resize_image_to_height, delete_file,
|
||||||
|
write_text_file)
|
||||||
|
|
||||||
|
|
||||||
def get_hash(task_name, pk):
|
def get_hash(task_name, pk):
|
||||||
@@ -174,26 +176,10 @@ def index_source_task(source_id):
|
|||||||
# Video has no unique key (ID), it can't be indexed
|
# Video has no unique key (ID), it can't be indexed
|
||||||
continue
|
continue
|
||||||
try:
|
try:
|
||||||
media = Media.objects.get(key=key)
|
media = Media.objects.get(key=key, source=source)
|
||||||
except Media.DoesNotExist:
|
except Media.DoesNotExist:
|
||||||
media = Media(key=key)
|
media = Media(key=key)
|
||||||
media.source = source
|
media.source = source
|
||||||
media.metadata = json.dumps(video)
|
|
||||||
upload_date = media.upload_date
|
|
||||||
# Media must have a valid upload date
|
|
||||||
if upload_date:
|
|
||||||
media.published = timezone.make_aware(upload_date)
|
|
||||||
else:
|
|
||||||
log.error(f'Media has no upload date, skipping: {source} / {media}')
|
|
||||||
continue
|
|
||||||
# If the source has a cut-off check the upload date is within the allowed delta
|
|
||||||
if source.delete_old_media and source.days_to_keep > 0:
|
|
||||||
delta = timezone.now() - timedelta(days=source.days_to_keep)
|
|
||||||
if media.published < delta:
|
|
||||||
# Media was published after the cutoff date, skip it
|
|
||||||
log.warn(f'Media: {source} / {media} is older than '
|
|
||||||
f'{source.days_to_keep} days, skipping')
|
|
||||||
continue
|
|
||||||
try:
|
try:
|
||||||
media.save()
|
media.save()
|
||||||
log.info(f'Indexed media: {source} / {media}')
|
log.info(f'Indexed media: {source} / {media}')
|
||||||
@@ -225,6 +211,61 @@ def check_source_directory_exists(source_id):
|
|||||||
source.make_directory()
|
source.make_directory()
|
||||||
|
|
||||||
|
|
||||||
|
@background(schedule=0)
|
||||||
|
def download_media_metadata(media_id):
|
||||||
|
'''
|
||||||
|
Downloads the metadata for a media item.
|
||||||
|
'''
|
||||||
|
try:
|
||||||
|
media = Media.objects.get(pk=media_id)
|
||||||
|
except Media.DoesNotExist:
|
||||||
|
# Task triggered but the media no longer exists, do nothing
|
||||||
|
log.error(f'Task download_media_metadata(pk={media_id}) called but no '
|
||||||
|
f'media exists with ID: {media_id}')
|
||||||
|
return
|
||||||
|
source = media.source
|
||||||
|
metadata = media.index_metadata()
|
||||||
|
media.metadata = json.dumps(metadata, default=json_serial)
|
||||||
|
upload_date = media.upload_date
|
||||||
|
# Media must have a valid upload date
|
||||||
|
if upload_date:
|
||||||
|
media.published = timezone.make_aware(upload_date)
|
||||||
|
else:
|
||||||
|
log.error(f'Media has no upload date, skipping: {source} / {media}')
|
||||||
|
media.skip = True
|
||||||
|
# If the source has a download cap date check the upload date is allowed
|
||||||
|
max_cap_age = source.download_cap_date
|
||||||
|
if media.published and max_cap_age:
|
||||||
|
if media.published < max_cap_age:
|
||||||
|
# Media was published after the cap date, skip it
|
||||||
|
log.warn(f'Media: {source} / {media} is older than cap age '
|
||||||
|
f'{max_cap_age}, skipping')
|
||||||
|
media.skip = True
|
||||||
|
# If the source has a cut-off check the upload date is within the allowed delta
|
||||||
|
if source.delete_old_media and source.days_to_keep > 0:
|
||||||
|
if not isinstance(media.published, datetime):
|
||||||
|
# Media has no known published date or incomplete metadata
|
||||||
|
log.warn(f'Media: {source} / {media} has no published date, skipping')
|
||||||
|
media.skip = True
|
||||||
|
else:
|
||||||
|
delta = timezone.now() - timedelta(days=source.days_to_keep)
|
||||||
|
if media.published < delta:
|
||||||
|
# Media was published after the cutoff date, skip it
|
||||||
|
log.warn(f'Media: {source} / {media} is older than '
|
||||||
|
f'{source.days_to_keep} days, skipping')
|
||||||
|
media.skip = True
|
||||||
|
# Check we can download the media item
|
||||||
|
if not media.skip:
|
||||||
|
if media.get_format_str():
|
||||||
|
media.can_download = True
|
||||||
|
else:
|
||||||
|
media.can_download = False
|
||||||
|
# Save the media
|
||||||
|
media.save()
|
||||||
|
log.info(f'Saved {len(media.metadata)} bytes of metadata for: '
|
||||||
|
f'{source} / {media_id}')
|
||||||
|
|
||||||
|
|
||||||
@background(schedule=0)
|
@background(schedule=0)
|
||||||
def download_media_thumbnail(media_id, url):
|
def download_media_thumbnail(media_id, url):
|
||||||
'''
|
'''
|
||||||
@@ -270,9 +311,28 @@ def download_media(media_id):
|
|||||||
return
|
return
|
||||||
if media.skip:
|
if media.skip:
|
||||||
# Media was toggled to be skipped after the task was scheduled
|
# Media was toggled to be skipped after the task was scheduled
|
||||||
log.warn(f'Download task triggeredd media: {media} (UUID: {media.pk}) but it '
|
log.warn(f'Download task triggered for media: {media} (UUID: {media.pk}) but '
|
||||||
f'is now marked to be skipped, not downloading')
|
f'it is now marked to be skipped, not downloading')
|
||||||
return
|
return
|
||||||
|
if media.downloaded and media.media_file:
|
||||||
|
# Media has been marked as downloaded before the download_media task was fired,
|
||||||
|
# skip it
|
||||||
|
log.warn(f'Download task triggered for media: {media} (UUID: {media.pk}) but '
|
||||||
|
f'it has already been marked as downloaded, not downloading again')
|
||||||
|
return
|
||||||
|
if not media.source.download_media:
|
||||||
|
log.warn(f'Download task triggered for media: {media} (UUID: {media.pk}) but '
|
||||||
|
f'the source {media.source} has since been marked to not download, '
|
||||||
|
f'not downloading')
|
||||||
|
return
|
||||||
|
max_cap_age = media.source.download_cap_date
|
||||||
|
published = media.published
|
||||||
|
if max_cap_age and published:
|
||||||
|
if published <= max_cap_age:
|
||||||
|
log.warn(f'Download task triggered media: {media} (UUID: {media.pk}) but '
|
||||||
|
f'the source has a download cap and the media is now too old, '
|
||||||
|
f'not downloading')
|
||||||
|
return
|
||||||
filepath = media.filepath
|
filepath = media.filepath
|
||||||
log.info(f'Downloading media: {media} (UUID: {media.pk}) to: "{filepath}"')
|
log.info(f'Downloading media: {media} (UUID: {media.pk}) to: "{filepath}"')
|
||||||
format_str, container = media.download_media()
|
format_str, container = media.download_media()
|
||||||
@@ -281,7 +341,7 @@ def download_media(media_id):
|
|||||||
log.info(f'Successfully downloaded media: {media} (UUID: {media.pk}) to: '
|
log.info(f'Successfully downloaded media: {media} (UUID: {media.pk}) to: '
|
||||||
f'"{filepath}"')
|
f'"{filepath}"')
|
||||||
# Link the media file to the object and update info about the download
|
# Link the media file to the object and update info about the download
|
||||||
media.media_file.name = str(filepath)
|
media.media_file.name = str(media.source.type_directory_path / media.filename)
|
||||||
media.downloaded = True
|
media.downloaded = True
|
||||||
media.download_date = timezone.now()
|
media.download_date = timezone.now()
|
||||||
media.downloaded_filesize = os.path.getsize(filepath)
|
media.downloaded_filesize = os.path.getsize(filepath)
|
||||||
@@ -306,7 +366,7 @@ def download_media(media_id):
|
|||||||
media.downloaded_audio_codec = cformat['acodec']
|
media.downloaded_audio_codec = cformat['acodec']
|
||||||
if cformat['vcodec']:
|
if cformat['vcodec']:
|
||||||
# Combined
|
# Combined
|
||||||
media.downloaded_format = vformat['format']
|
media.downloaded_format = cformat['format']
|
||||||
media.downloaded_height = cformat['height']
|
media.downloaded_height = cformat['height']
|
||||||
media.downloaded_width = cformat['width']
|
media.downloaded_width = cformat['width']
|
||||||
media.downloaded_video_codec = cformat['vcodec']
|
media.downloaded_video_codec = cformat['vcodec']
|
||||||
@@ -317,11 +377,13 @@ def download_media(media_id):
|
|||||||
media.save()
|
media.save()
|
||||||
# If selected, copy the thumbnail over as well
|
# If selected, copy the thumbnail over as well
|
||||||
if media.source.copy_thumbnails and media.thumb:
|
if media.source.copy_thumbnails and media.thumb:
|
||||||
barefilepath, fileext = os.path.splitext(filepath)
|
|
||||||
thumbpath = f'{barefilepath}.jpg'
|
|
||||||
log.info(f'Copying media thumbnail from: {media.thumb.path} '
|
log.info(f'Copying media thumbnail from: {media.thumb.path} '
|
||||||
f'to: {thumbpath}')
|
f'to: {media.thumbpath}')
|
||||||
copyfile(media.thumb.path, thumbpath)
|
copyfile(media.thumb.path, media.thumbpath)
|
||||||
|
# If selected, write an NFO file
|
||||||
|
if media.source.write_nfo:
|
||||||
|
log.info(f'Writing media NFO file to: to: {media.nfopath}')
|
||||||
|
write_text_file(media.nfopath, media.nfoxml)
|
||||||
# Schedule a task to update media servers
|
# Schedule a task to update media servers
|
||||||
for mediaserver in MediaServer.objects.all():
|
for mediaserver in MediaServer.objects.all():
|
||||||
log.info(f'Scheduling media server updates')
|
log.info(f'Scheduling media server updates')
|
||||||
|
|||||||
@@ -11,18 +11,28 @@
|
|||||||
<tr>
|
<tr>
|
||||||
<td>{yyyymmdd}</td>
|
<td>{yyyymmdd}</td>
|
||||||
<td>Media publish date in YYYYMMDD</td>
|
<td>Media publish date in YYYYMMDD</td>
|
||||||
<td>20210101</td>
|
<td>20210131</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td>{yyyy_mm_dd}</td>
|
<td>{yyyy_mm_dd}</td>
|
||||||
<td>Media publish date in YYYY-MM-DD</td>
|
<td>Media publish date in YYYY-MM-DD</td>
|
||||||
<td>2021-01-01</td>
|
<td>2021-01-31</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td>{yyyy}</td>
|
<td>{yyyy}</td>
|
||||||
<td>Media publish year in YYYY</td>
|
<td>Media publish year in YYYY</td>
|
||||||
<td>2021</td>
|
<td>2021</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>{mm}</td>
|
||||||
|
<td>Media publish month in MM</td>
|
||||||
|
<td>01</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>{dd}</td>
|
||||||
|
<td>Media publish day in DD</td>
|
||||||
|
<td>31</td>
|
||||||
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td>{source}</td>
|
<td>{source}</td>
|
||||||
<td>Lower case source name, max 80 chars</td>
|
<td>Lower case source name, max 80 chars</td>
|
||||||
@@ -53,6 +63,11 @@
|
|||||||
<td>Media format string</td>
|
<td>Media format string</td>
|
||||||
<td>720p-avc1-mp4a</td>
|
<td>720p-avc1-mp4a</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>{playlist_title}</td>
|
||||||
|
<td>Playlist title of media, if it's in a playlist</td>
|
||||||
|
<td>Some Playlist</td>
|
||||||
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td>{ext}</td>
|
<td>{ext}</td>
|
||||||
<td>File extension</td>
|
<td>File extension</td>
|
||||||
@@ -71,7 +71,7 @@
|
|||||||
<div class="collection">
|
<div class="collection">
|
||||||
{% for media in latest_downloads %}
|
{% for media in latest_downloads %}
|
||||||
<a href="{% url 'sync:media-item' pk=media.pk %}" class="collection-item">
|
<a href="{% url 'sync:media-item' pk=media.pk %}" class="collection-item">
|
||||||
<div class="truncate"><strong>{{ media.name }}</strong> ({{ media.source }})</div>
|
<div class="truncate"><strong>{{ media.name }}</strong></div>
|
||||||
<div class="truncate"><strong>{{ media.download_date|timesince:now }}</strong> ago from "{{ media.source.name }}"</div>
|
<div class="truncate"><strong>{{ media.download_date|timesince:now }}</strong> ago from "{{ media.source.name }}"</div>
|
||||||
</a>
|
</a>
|
||||||
{% empty %}
|
{% empty %}
|
||||||
@@ -89,7 +89,7 @@
|
|||||||
{% for media in largest_downloads %}
|
{% for media in largest_downloads %}
|
||||||
<a href="{% url 'sync:media-item' pk=media.pk %}" class="collection-item">
|
<a href="{% url 'sync:media-item' pk=media.pk %}" class="collection-item">
|
||||||
<div class="truncate">{{ media.name }}</div>
|
<div class="truncate">{{ media.name }}</div>
|
||||||
<div class="truncate"><strong>{{ media.downloaded_filesize|filesizeformat }}</strong>{% if media.downloaded_format %} in {{ media.downloaded_format }}{% endif %}</div>
|
<div class="truncate"><strong>{{ media.downloaded_filesize|filesizeformat }}</strong>{% if media.downloaded_format %} in {{ media.downloaded_format }}{% endif %} from "{{ media.source.name }}"</div>
|
||||||
</a>
|
</a>
|
||||||
{% empty %}
|
{% empty %}
|
||||||
<span class="collection-item">No media has been downloaded.</span>
|
<span class="collection-item">No media has been downloaded.</span>
|
||||||
@@ -101,7 +101,7 @@
|
|||||||
</div>
|
</div>
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<div class="col s12">
|
<div class="col s12">
|
||||||
<h2 class="truncate">Runtime infomation</h2>
|
<h2 class="truncate">Runtime information</h2>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div class="row">
|
<div class="row">
|
||||||
@@ -123,6 +123,10 @@
|
|||||||
<td class="hide-on-small-only">Downloads directory</td>
|
<td class="hide-on-small-only">Downloads directory</td>
|
||||||
<td><span class="hide-on-med-and-up">Downloads directory<br></span><strong>{{ downloads_dir }}</strong></td>
|
<td><span class="hide-on-med-and-up">Downloads directory<br></span><strong>{{ downloads_dir }}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
|
<tr title="Database connection used by TubeSync">
|
||||||
|
<td class="hide-on-small-only">Database</td>
|
||||||
|
<td><span class="hide-on-med-and-up">Database<br></span><strong>{{ database_connection }}</strong></td>
|
||||||
|
</tr>
|
||||||
</table>
|
</table>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -9,6 +9,12 @@
|
|||||||
{% if media.title %}<h2 class="truncate"><strong>{{ media.title }}</strong></h2>{% endif %}
|
{% if media.title %}<h2 class="truncate"><strong>{{ media.title }}</strong></h2>{% endif %}
|
||||||
<p class="truncate"><strong><a href="{{ media.url }}" target="_blank"><i class="fas fa-link"></i> {{ media.url }}</a></strong></p>
|
<p class="truncate"><strong><a href="{{ media.url }}" target="_blank"><i class="fas fa-link"></i> {{ media.url }}</a></strong></p>
|
||||||
<p class="truncate">Downloading to: <strong>{{ media.source.directory_path }}</strong></p>
|
<p class="truncate">Downloading to: <strong>{{ media.source.directory_path }}</strong></p>
|
||||||
|
{% if download_state == 'downloaded' %}
|
||||||
|
<video controls style="width: 100%">
|
||||||
|
<source src="{% url 'sync:media-content' pk=media.pk %}">
|
||||||
|
</video>
|
||||||
|
<p class="truncate"><a href="{% url 'sync:media-content' pk=media.pk %}" download="{{ media.filename }}">Download</a></p>
|
||||||
|
{% endif %}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{% if not media.can_download %}{% include 'errorbox.html' with message='Media cannot be downloaded because it has no formats which match the source requirements.' %}{% endif %}
|
{% if not media.can_download %}{% include 'errorbox.html' with message='Media cannot be downloaded because it has no formats which match the source requirements.' %}{% endif %}
|
||||||
@@ -64,8 +70,14 @@
|
|||||||
<td class="hide-on-small-only">Fallback</td>
|
<td class="hide-on-small-only">Fallback</td>
|
||||||
<td><span class="hide-on-med-and-up">Fallback<br></span><strong>{{ media.source.get_fallback_display }}</strong></td>
|
<td><span class="hide-on-med-and-up">Fallback<br></span><strong>{{ media.source.get_fallback_display }}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
|
{% if not media.source.download_media %}
|
||||||
|
<tr title="Is media marked to be downloaded at the source?">
|
||||||
|
<td class="hide-on-small-only">Source download?</td>
|
||||||
|
<td><span class="hide-on-med-and-up">Source download?<br></span><strong>{% if media.source.download_media %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
|
</tr>
|
||||||
|
{% endif %}
|
||||||
{% if media.skip %}
|
{% if media.skip %}
|
||||||
<tr title="Has the media been downloaded?">
|
<tr title="Is the media marked to be skipped?">
|
||||||
<td class="hide-on-small-only">Skipping?</td>
|
<td class="hide-on-small-only">Skipping?</td>
|
||||||
<td><span class="hide-on-med-and-up">Skipping?<br></span><strong>{% if media.skip %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
<td><span class="hide-on-med-and-up">Skipping?<br></span><strong>{% if media.skip %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
@@ -109,7 +121,7 @@
|
|||||||
{% else %}
|
{% else %}
|
||||||
<tr title="Can the media be downloaded?">
|
<tr title="Can the media be downloaded?">
|
||||||
<td class="hide-on-small-only">Can download?</td>
|
<td class="hide-on-small-only">Can download?</td>
|
||||||
<td><span class="hide-on-med-and-up">Can download?<br></span><strong>{% if youtube_dl_format %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
<td><span class="hide-on-med-and-up">Can download?<br></span><strong>{% if media.can_download %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
<tr title="The available media formats">
|
<tr title="The available media formats">
|
||||||
|
|||||||
@@ -4,9 +4,23 @@
|
|||||||
|
|
||||||
{% block content %}
|
{% block content %}
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<div class="col s12">
|
<div class="col s12 m6">
|
||||||
<h1 class="truncate">Media</h1>
|
<h1 class="truncate">Media</h1>
|
||||||
</div>
|
</div>
|
||||||
|
<div class="col s12 m3">
|
||||||
|
{% if show_skipped %}
|
||||||
|
<a href="{% url 'sync:media' %}{% if source %}?filter={{ source.pk }}{% endif %}" class="btn"><i class="far fa-eye-slash"></i> Hide skipped media</a>
|
||||||
|
{% else %}
|
||||||
|
<a href="{% url 'sync:media' %}?show_skipped=yes{% if source %}&filter={{ source.pk }}{% endif %}" class="btn"><i class="far fa-eye"></i> Show skipped media</a>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
<div class="col s12 m3">
|
||||||
|
{% if only_skipped %}
|
||||||
|
<a href="{% url 'sync:media' %}{% if source %}?filter={{ source.pk }}{% endif %}" class="btn"><i class="far fa-eye-slash"></i> Only skipped media</a>
|
||||||
|
{% else %}
|
||||||
|
<a href="{% url 'sync:media' %}?only_skipped=yes{% if source %}&filter={{ source.pk }}{% endif %}" class="btn"><i class="far fa-eye"></i> Only skipped media</a>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{% include 'infobox.html' with message=message %}
|
{% include 'infobox.html' with message=message %}
|
||||||
<div class="row no-margin-bottom">
|
<div class="row no-margin-bottom">
|
||||||
@@ -24,8 +38,12 @@
|
|||||||
{% else %}
|
{% else %}
|
||||||
{% if m.skip %}
|
{% if m.skip %}
|
||||||
<span class="error-text"><i class="fas fa-times" title="Skipping media"></i> Skipped</span>
|
<span class="error-text"><i class="fas fa-times" title="Skipping media"></i> Skipped</span>
|
||||||
|
{% elif not m.source.download_media %}
|
||||||
|
<span class="error-text"><i class="fas fa-times" title="Not downloading media for this source"></i> Disabled at source</span>
|
||||||
|
{% elif not m.has_metadata %}
|
||||||
|
<i class="far fa-clock" title="Waiting for metadata"></i> Fetching metadata
|
||||||
{% elif m.can_download %}
|
{% elif m.can_download %}
|
||||||
<i class="far fa-clock" title="Waiting to download or downloading"></i> {{ m.published|date:'Y-m-d' }}
|
<i class="far fa-clock" title="Waiting to download or downloading"></i> Downloading
|
||||||
{% else %}
|
{% else %}
|
||||||
<span class="error-text"><i class="fas fa-exclamation-triangle" title="No matching formats to download"></i> No matching formats</span>
|
<span class="error-text"><i class="fas fa-exclamation-triangle" title="No matching formats to download"></i> No matching formats</span>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
@@ -44,5 +62,5 @@
|
|||||||
</div>
|
</div>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
</div>
|
</div>
|
||||||
{% include 'pagination.html' with pagination=sources.paginator filter=source.pk %}
|
{% include 'pagination.html' with pagination=sources.paginator filter=source.pk show_skipped=show_skipped %}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|||||||
@@ -25,7 +25,7 @@
|
|||||||
</div>
|
</div>
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<div class="col s12">
|
<div class="col s12">
|
||||||
{% include 'mediaformatvars.html' %}
|
{% include 'sync/_mediaformatvars.html' %}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|||||||
@@ -27,7 +27,7 @@
|
|||||||
</div>
|
</div>
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<div class="col s12">
|
<div class="col s12">
|
||||||
{% include 'mediaformatvars.html' %}
|
{% include 'sync/_mediaformatvars.html' %}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|||||||
@@ -51,10 +51,20 @@
|
|||||||
<td class="hide-on-small-only">Example filename</td>
|
<td class="hide-on-small-only">Example filename</td>
|
||||||
<td><span class="hide-on-med-and-up">Example filename<br></span><strong>{{ source.get_example_media_format }}</strong></td>
|
<td><span class="hide-on-med-and-up">Example filename<br></span><strong>{{ source.get_example_media_format }}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
|
{% if source.download_cap > 0 %}
|
||||||
|
<tr title="Do not download videos older than this cap">
|
||||||
|
<td class="hide-on-small-only">Download cap</td>
|
||||||
|
<td><span class="hide-on-med-and-up">Download cap<br></span><strong>{{ source.get_download_cap_display }}</strong></td>
|
||||||
|
</tr>
|
||||||
|
{% endif %}
|
||||||
<tr title="Schedule of how often to index the source for new media">
|
<tr title="Schedule of how often to index the source for new media">
|
||||||
<td class="hide-on-small-only">Index schedule</td>
|
<td class="hide-on-small-only">Index schedule</td>
|
||||||
<td><span class="hide-on-med-and-up">Index schedule<br></span><strong>{{ source.get_index_schedule_display }}</strong></td>
|
<td><span class="hide-on-med-and-up">Index schedule<br></span><strong>{{ source.get_index_schedule_display }}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
|
<tr title="Download media from this source">
|
||||||
|
<td class="hide-on-small-only">Download media?</td>
|
||||||
|
<td><span class="hide-on-med-and-up">Download media?<br></span><strong>{% if source.download_media %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
|
</tr>
|
||||||
<tr title="When then source was created locally in TubeSync">
|
<tr title="When then source was created locally in TubeSync">
|
||||||
<td class="hide-on-small-only">Created</td>
|
<td class="hide-on-small-only">Created</td>
|
||||||
<td><span class="hide-on-med-and-up">Created<br></span><strong>{{ source.created|date:'Y-m-d H:i:s' }}</strong></td>
|
<td><span class="hide-on-med-and-up">Created<br></span><strong>{{ source.created|date:'Y-m-d H:i:s' }}</strong></td>
|
||||||
@@ -97,6 +107,14 @@
|
|||||||
<td class="hide-on-small-only">Copy thumbnails?</td>
|
<td class="hide-on-small-only">Copy thumbnails?</td>
|
||||||
<td><span class="hide-on-med-and-up">Copy thumbnails?<br></span><strong>{% if source.copy_thumbnails %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
<td><span class="hide-on-med-and-up">Copy thumbnails?<br></span><strong>{% if source.copy_thumbnails %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
</tr>
|
</tr>
|
||||||
|
<tr title="Should an NFO file be written with the media?">
|
||||||
|
<td class="hide-on-small-only">Write NFO?</td>
|
||||||
|
<td><span class="hide-on-med-and-up">Write NFO?<br></span><strong>{% if source.write_nfo %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
|
</tr>
|
||||||
|
<tr title="Should a JSON file be written with the media?">
|
||||||
|
<td class="hide-on-small-only">Write JSON?</td>
|
||||||
|
<td><span class="hide-on-med-and-up">Write JSON?<br></span><strong>{% if source.write_json %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
|
||||||
|
</tr>
|
||||||
{% if source.delete_old_media and source.days_to_keep > 0 %}
|
{% if source.delete_old_media and source.days_to_keep > 0 %}
|
||||||
<tr title="Days after which your media from this source will be locally deleted">
|
<tr title="Days after which your media from this source will be locally deleted">
|
||||||
<td class="hide-on-small-only">Delete old media</td>
|
<td class="hide-on-small-only">Delete old media</td>
|
||||||
|
|||||||
@@ -10,10 +10,13 @@
|
|||||||
</div>
|
</div>
|
||||||
{% include 'infobox.html' with message=message %}
|
{% include 'infobox.html' with message=message %}
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<div class="col s12 l6 margin-bottom">
|
<div class="col m12 xl4 margin-bottom">
|
||||||
<a href="{% url 'sync:validate-source' source_type='youtube-channel' %}" class="btn">Add a YouTube channel <i class="fab fa-youtube"></i></a>
|
<a href="{% url 'sync:validate-source' source_type='youtube-channel' %}" class="btn">Add a YouTube channel <i class="fab fa-youtube"></i></a>
|
||||||
</div>
|
</div>
|
||||||
<div class="col s12 l6 margin-bottom">
|
<div class="col m12 xl4 margin-bottom">
|
||||||
|
<a href="{% url 'sync:validate-source' source_type='youtube-channel-id' %}" class="btn">Add a YouTube channel by ID <i class="fab fa-youtube"></i></a>
|
||||||
|
</div>
|
||||||
|
<div class="col m12 xl4 margin-bottom">
|
||||||
<a href="{% url 'sync:validate-source' source_type='youtube-playlist' %}" class="btn">Add a YouTube playlist <i class="fab fa-youtube"></i></a>
|
<a href="{% url 'sync:validate-source' source_type='youtube-playlist' %}" class="btn">Add a YouTube playlist <i class="fab fa-youtube"></i></a>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -27,7 +30,7 @@
|
|||||||
{% if source.has_failed %}
|
{% if source.has_failed %}
|
||||||
<span class="error-text"><i class="fas fa-exclamation-triangle"></i> <strong>Source has permanent failures</strong></span>
|
<span class="error-text"><i class="fas fa-exclamation-triangle"></i> <strong>Source has permanent failures</strong></span>
|
||||||
{% else %}
|
{% else %}
|
||||||
<strong>{{ source.media_count }}</strong> media items{% if source.delete_old_media and source.days_to_keep > 0 %}, keep {{ source.days_to_keep }} days of media{% endif %}
|
<strong>{{ source.media_count }}</strong> media items, <strong>{{ source.downloaded_count }}</strong> downloaded{% if source.delete_old_media and source.days_to_keep > 0 %}, keeping {{ source.days_to_keep }} days of media{% endif %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
</a>
|
</a>
|
||||||
{% empty %}
|
{% empty %}
|
||||||
|
|||||||