Compare commits


No commits in common. "main" and "v0.12.0" have entirely different histories.

49 changed files with 148 additions and 7812 deletions

View File

@ -4,10 +4,12 @@ env:
IMAGE_NAME: tubesync IMAGE_NAME: tubesync
on: on:
workflow_dispatch:
push: push:
branches: branches:
- main - main
pull_request:
branches:
- main
jobs: jobs:
test: test:

.gitignore vendored
View File

@ -1,4 +1,3 @@
.DS_Store
# Byte-compiled / optimized / DLL files # Byte-compiled / optimized / DLL files
__pycache__/ __pycache__/
*.py[cod] *.py[cod]
@ -131,6 +130,3 @@ dmypy.json
# Pyre type checker # Pyre type checker
.pyre/ .pyre/
Pipfile.lock
.vscode/launch.json

View File

@ -1,17 +1,16 @@
FROM debian:bookworm-slim FROM debian:bullseye-slim
ARG TARGETPLATFORM ARG TARGETPLATFORM
ARG S6_VERSION="3.1.5.0" ARG S6_VERSION="3.1.2.1"
ARG FFMPEG_DATE="autobuild-2023-11-29-14-19" ARG FFMPEG_DATE="autobuild-2023-01-03-12-55"
ARG FFMPEG_VERSION="112875-g47e214245b" ARG FFMPEG_VERSION="109474-gc94988a781"
ENV DEBIAN_FRONTEND="noninteractive" \ ENV DEBIAN_FRONTEND="noninteractive" \
HOME="/root" \ HOME="/root" \
LANGUAGE="en_US.UTF-8" \ LANGUAGE="en_US.UTF-8" \
LANG="en_US.UTF-8" \ LANG="en_US.UTF-8" \
LC_ALL="en_US.UTF-8" \ LC_ALL="en_US.UTF-8" \
TERM="xterm" \ TERM="xterm"
S6_CMD_WAIT_FOR_SERVICES_MAXTIME="0"
# Install third party software # Install third party software
RUN export ARCH=$(case ${TARGETPLATFORM:-linux/amd64} in \ RUN export ARCH=$(case ${TARGETPLATFORM:-linux/amd64} in \
@ -19,22 +18,22 @@ RUN export ARCH=$(case ${TARGETPLATFORM:-linux/amd64} in \
"linux/arm64") echo "aarch64" ;; \ "linux/arm64") echo "aarch64" ;; \
*) echo "" ;; esac) && \ *) echo "" ;; esac) && \
export S6_ARCH_EXPECTED_SHA256=$(case ${TARGETPLATFORM:-linux/amd64} in \ export S6_ARCH_EXPECTED_SHA256=$(case ${TARGETPLATFORM:-linux/amd64} in \
"linux/amd64") echo "65d0d0f353d2ff9d0af202b268b4bf53a9948a5007650854855c729289085739" ;; \ "linux/amd64") echo "6019b6b06cfdbb1d1cd572d46b9b158a4904fd19ca59d374de4ddaaa6a3727d5" ;; \
"linux/arm64") echo "3fbd14201473710a592b2189e81f00f3c8998e96d34f16bd2429c35d1bc36d00" ;; \ "linux/arm64") echo "e73f9a021b64f88278830742149c14ef8a52331102881ba025bf32a66a0e7c78" ;; \
*) echo "" ;; esac) && \ *) echo "" ;; esac) && \
export S6_DOWNLOAD_ARCH=$(case ${TARGETPLATFORM:-linux/amd64} in \ export S6_DOWNLOAD_ARCH=$(case ${TARGETPLATFORM:-linux/amd64} in \
"linux/amd64") echo "https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-x86_64.tar.xz" ;; \ "linux/amd64") echo "https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-x86_64.tar.xz" ;; \
"linux/arm64") echo "https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-aarch64.tar.xz" ;; \ "linux/arm64") echo "https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-aarch64.tar.xz" ;; \
*) echo "" ;; esac) && \ *) echo "" ;; esac) && \
export FFMPEG_EXPECTED_SHA256=$(case ${TARGETPLATFORM:-linux/amd64} in \ export FFMPEG_EXPECTED_SHA256=$(case ${TARGETPLATFORM:-linux/amd64} in \
"linux/amd64") echo "36bac8c527bf390603416f749ab0dd860142b0a66f0865b67366062a9c286c8b" ;; \ "linux/amd64") echo "ed9059668e4a6dac9bde122a775f52ad08cbb90df3658f8c1e328477c13c242e" ;; \
"linux/arm64") echo "8f36e45d99d2367a5c0c220ee3164fa48f4f0cec35f78204ccced8dc303bfbdc" ;; \ "linux/arm64") echo "dd1375bd351d38ea1cc3efd68a998699366e28bd9b90df65d11af2b9121746b7" ;; \
*) echo "" ;; esac) && \ *) echo "" ;; esac) && \
export FFMPEG_DOWNLOAD=$(case ${TARGETPLATFORM:-linux/amd64} in \ export FFMPEG_DOWNLOAD=$(case ${TARGETPLATFORM:-linux/amd64} in \
"linux/amd64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/${FFMPEG_DATE}/ffmpeg-N-${FFMPEG_VERSION}-linux64-gpl.tar.xz" ;; \ "linux/amd64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/${FFMPEG_DATE}/ffmpeg-N-${FFMPEG_VERSION}-linux64-gpl.tar.xz" ;; \
"linux/arm64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/${FFMPEG_DATE}/ffmpeg-N-${FFMPEG_VERSION}-linuxarm64-gpl.tar.xz" ;; \ "linux/arm64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/${FFMPEG_DATE}/ffmpeg-N-${FFMPEG_VERSION}-linuxarm64-gpl.tar.xz" ;; \
*) echo "" ;; esac) && \ *) echo "" ;; esac) && \
export S6_NOARCH_EXPECTED_SHA256="fd80c231e8ae1a0667b7ae2078b9ad0e1269c4d117bf447a4506815a700dbff3" && \ export S6_NOARCH_EXPECTED_SHA256="cee89d3eeabdfe15239b2c5c3581d9352d2197d4fd23bba3f1e64bf916ccf496" && \
export S6_DOWNLOAD_NOARCH="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-noarch.tar.xz" && \ export S6_DOWNLOAD_NOARCH="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-noarch.tar.xz" && \
echo "Building for arch: ${ARCH}|${ARCH44}, downloading S6 from: ${S6_DOWNLOAD}}, expecting S6 SHA256: ${S6_EXPECTED_SHA256}" && \ echo "Building for arch: ${ARCH}|${ARCH44}, downloading S6 from: ${S6_DOWNLOAD}}, expecting S6 SHA256: ${S6_EXPECTED_SHA256}" && \
set -x && \ set -x && \
@ -83,30 +82,30 @@ RUN set -x && \
apt-get -y install nginx-light && \ apt-get -y install nginx-light && \
apt-get -y --no-install-recommends install \ apt-get -y --no-install-recommends install \
python3 \ python3 \
python3-dev \ python3-setuptools \
python3-pip \ python3-pip \
python3-wheel \ python3-dev \
pipenv \
gcc \ gcc \
g++ \ g++ \
make \ make \
pkgconf \
default-libmysqlclient-dev \ default-libmysqlclient-dev \
libmariadb3 \ libmariadb3 \
postgresql-common \ postgresql-common \
libpq-dev \ libpq-dev \
libpq5 \ libpq5 \
libjpeg62-turbo \ libjpeg62-turbo \
libwebp7 \ libwebp6 \
libjpeg-dev \ libjpeg-dev \
zlib1g-dev \ zlib1g-dev \
libwebp-dev \ libwebp-dev \
redis-server && \ redis-server && \
# Install pipenv
pip3 --disable-pip-version-check install wheel pipenv && \
# Create a 'app' user which the application will run as # Create a 'app' user which the application will run as
groupadd app && \ groupadd app && \
useradd -M -d /app -s /bin/false -g app app && \ useradd -M -d /app -s /bin/false -g app app && \
# Install non-distro packages # Install non-distro packages
PIPENV_VERBOSITY=64 pipenv install --system --skip-lock && \ pipenv install --system --skip-lock && \
# Make absolutely sure we didn't accidentally bundle a SQLite dev database # Make absolutely sure we didn't accidentally bundle a SQLite dev database
rm -rf /app/db.sqlite3 && \ rm -rf /app/db.sqlite3 && \
# Run any required app commands # Run any required app commands
@ -120,6 +119,7 @@ RUN set -x && \
# Clean up # Clean up
rm /app/Pipfile && \ rm /app/Pipfile && \
pipenv --clear && \ pipenv --clear && \
pip3 --disable-pip-version-check uninstall -y pipenv wheel virtualenv && \
apt-get -y autoremove --purge \ apt-get -y autoremove --purge \
python3-pip \ python3-pip \
python3-dev \ python3-dev \

View File

@ -29,13 +29,5 @@ runcontainer:
$(docker) run --rm --name $(name) --env-file dev.env --log-opt max-size=50m -ti -p 4848:4848 $(image) $(docker) run --rm --name $(name) --env-file dev.env --log-opt max-size=50m -ti -p 4848:4848 $(image)
stopcontainer:
$(docker) stop $(name)
test: build test: build
cd tubesync && $(python) manage.py test --verbosity=2 && cd .. cd tubesync && $(python) manage.py test --verbosity=2 && cd ..
shell:
cd tubesync && $(python) manage.py shell

View File

@ -4,7 +4,6 @@ url = "https://pypi.org/simple"
verify_ssl = true verify_ssl = true
[dev-packages] [dev-packages]
autopep8 = "*"
[packages] [packages]
django = "~=3.2" django = "~=3.2"
@ -16,10 +15,10 @@ gunicorn = "*"
django-compressor = "*" django-compressor = "*"
httptools = "*" httptools = "*"
django-background-tasks = "*" django-background-tasks = "*"
requests = "*"
django-basicauth = "*" django-basicauth = "*"
psycopg2-binary = "*" psycopg2-binary = "*"
mysqlclient = "*" mysqlclient = "*"
yt-dlp = "*" yt-dlp = "*"
redis = "*" redis = "*"
hiredis = "*" hiredis = "*"
requests = {extras = ["socks"], version = "*"}

View File

@ -241,7 +241,6 @@ and less common features:
* [Reset tasks from the command line](https://github.com/meeb/tubesync/blob/main/docs/reset-tasks.md) * [Reset tasks from the command line](https://github.com/meeb/tubesync/blob/main/docs/reset-tasks.md)
* [Using PostgreSQL, MySQL or MariaDB as database backends](https://github.com/meeb/tubesync/blob/main/docs/other-database-backends.md) * [Using PostgreSQL, MySQL or MariaDB as database backends](https://github.com/meeb/tubesync/blob/main/docs/other-database-backends.md)
* [Using cookies](https://github.com/meeb/tubesync/blob/main/docs/using-cookies.md) * [Using cookies](https://github.com/meeb/tubesync/blob/main/docs/using-cookies.md)
* [Reset metadata](https://github.com/meeb/tubesync/blob/main/docs/reset-metadata.md)
# Warnings # Warnings
@ -351,10 +350,6 @@ etc.). Configuration of this is beyond the scope of this README.
Just `amd64` for the moment. Others may be made available if there is demand. Just `amd64` for the moment. Others may be made available if there is demand.
### The pipenv install fails with "Locking failed"!
Make sure that you have `mysql_config` or `mariadb_config` available, as required by the Python module `mysqlclient`. On Debian-based systems this is usually found in the package `libmysqlclient-dev`.
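For example, a sketch for a Debian/Ubuntu host (on current Debian releases the headers that provide `mysql_config` come from the `default-libmysqlclient-dev` package, the same package the TubeSync Dockerfile installs):

```bash
# Install the MySQL/MariaDB client development files that provide mysql_config
sudo apt-get update
sudo apt-get install -y default-libmysqlclient-dev
```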
# Advanced configuration # Advanced configuration
@ -362,20 +357,19 @@ There are a number of other environment variables you can set. These are, mostly
**NOT** required to be set in the default container installation, they are really only **NOT** required to be set in the default container installation, they are really only
useful if you are manually installing TubeSync in some other environment. These are: useful if you are manually installing TubeSync in some other environment. These are:
| Name | What | Example | | Name | What | Example |
| --------------------------- | ------------------------------------------------------------ | ------------------------------------ | | ------------------------ | ------------------------------------------------------------ | ------------------------------------ |
| DJANGO_SECRET_KEY | Django's SECRET_KEY | YJySXnQLB7UVZw2dXKDWxI5lEZaImK6l | | DJANGO_SECRET_KEY | Django's SECRET_KEY | YJySXnQLB7UVZw2dXKDWxI5lEZaImK6l |
| DJANGO_URL_PREFIX | Run TubeSync in a sub-URL on the web server | /somepath/ | | DJANGO_URL_PREFIX | Run TubeSync in a sub-URL on the web server | /somepath/ |
| TUBESYNC_DEBUG | Enable debugging | True | | TUBESYNC_DEBUG | Enable debugging | True |
| TUBESYNC_WORKERS | Number of background workers, default is 2, max allowed is 8 | 2 | | TUBESYNC_WORKERS | Number of background workers, default is 2, max allowed is 8 | 2 |
| TUBESYNC_HOSTS | Django's ALLOWED_HOSTS, defaults to `*` | tubesync.example.com,otherhost.com | | TUBESYNC_HOSTS | Django's ALLOWED_HOSTS, defaults to `*` | tubesync.example.com,otherhost.com |
| TUBESYNC_RESET_DOWNLOAD_DIR | Toggle resetting `/downloads` permissions, defaults to True | True | GUNICORN_WORKERS | Number of gunicorn workers to spawn | 3 |
| GUNICORN_WORKERS | Number of gunicorn workers to spawn | 3 | | LISTEN_HOST | IP address for gunicorn to listen on | 127.0.0.1 |
| LISTEN_HOST | IP address for gunicorn to listen on | 127.0.0.1 | | LISTEN_PORT | Port number for gunicorn to listen on | 8080 |
| LISTEN_PORT | Port number for gunicorn to listen on | 8080 | | HTTP_USER | Sets the username for HTTP basic authentication | some-username |
| HTTP_USER | Sets the username for HTTP basic authentication | some-username | | HTTP_PASS | Sets the password for HTTP basic authentication | some-secure-password |
| HTTP_PASS | Sets the password for HTTP basic authentication | some-secure-password | | DATABASE_CONNECTION | Optional external database connection details | mysql://user:pass@host:port/database |
| DATABASE_CONNECTION | Optional external database connection details | mysql://user:pass@host:port/database |
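For illustration, a minimal sketch of setting a few of these variables on the container (the paths and credential values are placeholders; the image name, port and volume mounts follow the Docker examples used elsewhere in the project documentation):

```bash
docker run -d --name tubesync \
  -p 4848:4848 \
  -v /some/path/to/config:/config \
  -v /some/path/to/downloads:/downloads \
  -e TUBESYNC_WORKERS=2 \
  -e HTTP_USER=some-username \
  -e HTTP_PASS=some-secure-password \
  ghcr.io/meeb/tubesync:latest
```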
# Manual, non-containerised, installation # Manual, non-containerised, installation

View File

@ -11,6 +11,8 @@ chown -R app:app /run/app
chmod -R 0700 /run/app chmod -R 0700 /run/app
chown -R app:app /config chown -R app:app /config
chmod -R 0755 /config chmod -R 0755 /config
chown -R app:app /downloads
chmod -R 0755 /downloads
chown -R root:app /app chown -R root:app /app
chmod -R 0750 /app chmod -R 0750 /app
chown -R app:app /app/common/static chown -R app:app /app/common/static
@ -20,15 +22,6 @@ chmod -R 0750 /app/static
find /app -type f ! -iname healthcheck.py -exec chmod 640 {} \; find /app -type f ! -iname healthcheck.py -exec chmod 640 {} \;
chmod 0755 /app/healthcheck.py chmod 0755 /app/healthcheck.py
# Optionally reset the download dir permissions
TUBESYNC_RESET_DOWNLOAD_DIR="${TUBESYNC_RESET_DOWNLOAD_DIR:-True}"
if [ "$TUBESYNC_RESET_DOWNLOAD_DIR" == "True" ]
then
echo "TUBESYNC_RESET_DOWNLOAD_DIR=True, Resetting /downloads directory permissions"
chown -R app:app /downloads
chmod -R 0755 /downloads
fi
# Run migrations # Run migrations
exec s6-setuidgid app \ exec s6-setuidgid app \
/usr/bin/python3 /app/manage.py migrate /usr/bin/python3 /app/manage.py migrate

View File

@ -0,0 +1 @@
60000

View File

@ -24,7 +24,7 @@ $ docker exec -i tubesync python3 /app/manage.py dumpdata > some-file.json
Then change your database backend over, then use Then change your database backend over, then use
```bash ```bash
$ cat some-file.json | docker exec -i tubesync python3 /app/manage.py loaddata - --format=json $ cat some-file.json | docker exec -i tubesync python3 /app/manage.py loaddata --format=json -
``` ```
As detailed in the Django documentation: As detailed in the Django documentation:
@ -78,55 +78,3 @@ entry in the container or stdout logs:
If you see a line similar to the above and the web interface loads, congratulations, If you see a line similar to the above and the web interface loads, congratulations,
you are now using an external database server for your TubeSync data! you are now using an external database server for your TubeSync data!
## Database Compression (For MariaDB)
With a lot of media files the `sync_media` table grows in size quickly.
If you are using MariaDB you can save space by enabling column compression with the following steps:
1. Stop TubeSync
2. Execute `ALTER TABLE sync_media MODIFY metadata LONGTEXT COMPRESSED;` on the `tubesync` database (see the example below)
3. Start TubeSync and confirm the connection still works.
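For step 2, a minimal sketch of running the statement against a MariaDB instance in a container (the container name `tubesync-db` and root credentials are assumptions, adjust them for your setup):

```bash
# Assumes a MariaDB container named tubesync-db holding the "tubesync" database;
# you will be prompted for the root password. Older images ship the client
# binary as "mysql" rather than "mariadb".
docker exec -it tubesync-db \
  mariadb -u root -p tubesync \
  -e "ALTER TABLE sync_media MODIFY metadata LONGTEXT COMPRESSED;"
```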
## Docker Compose
If you're using Docker Compose and simply want to connect to another container with
the DB for the performance benefits, a configuration like this would be enough:
```
tubesync-db:
image: postgres:15.2
container_name: tubesync-db
restart: unless-stopped
volumes:
- /<path/to>/init.sql:/docker-entrypoint-initdb.d/init.sql
- /<path/to>/tubesync-db:/var/lib/postgresql/data
environment:
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=testpassword
tubesync:
image: ghcr.io/meeb/tubesync:latest
container_name: tubesync
restart: unless-stopped
ports:
- 4848:4848
volumes:
- /<path/to>/tubesync/config:/config
- /<path/to>/YouTube:/downloads
environment:
- DATABASE_CONNECTION=postgresql://postgres:testpassword@tubesync-db:5432/tubesync
depends_on:
- tubesync-db
```
Note that an `init.sql` file is needed to initialize the `tubesync`
database before it can be written to. This file should contain:
```
CREATE DATABASE tubesync;
```
Then it must be mapped to `/docker-entrypoint-initdb.d/init.sql` for it
to be executed on first startup of the container. See the `tubesync-db`
volume mapping above for how to do this.

View File

@ -1,30 +0,0 @@
# TubeSync
## Advanced usage guide - reset media metadata from the command line
This command allows you to reset all media item metadata. You might want to use
this if you have a lot of media items with invalid metadata and you want to
wipe it, which triggers the metadata to be redownloaded.
## Requirements
You have added some sources and media
## Steps
### 1. Run the reset tasks command
Execute the following Django command:
`./manage.py reset-metadata`
When deploying TubeSync inside a container, you can execute this with:
`docker exec -ti tubesync python3 /app/manage.py reset-metadata`
This command will log what it's doing to the terminal when you run it.
When this is run, new tasks will be created immediately, so all your media
items will start downloading updated metadata straight away and any missing information
such as thumbnails will be redownloaded.

View File

@ -1,14 +1,10 @@
import logging import logging
from django.conf import settings
logging_level = logging.DEBUG if settings.DEBUG else logging.INFO
log = logging.getLogger('tubesync') log = logging.getLogger('tubesync')
log.setLevel(logging_level) log.setLevel(logging.DEBUG)
ch = logging.StreamHandler() ch = logging.StreamHandler()
ch.setLevel(logging_level) ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s [%(name)s/%(levelname)s] %(message)s') formatter = logging.Formatter('%(asctime)s [%(name)s/%(levelname)s] %(message)s')
ch.setFormatter(formatter) ch.setFormatter(formatter)
log.addHandler(ch) log.addHandler(ch)

View File

@ -1,19 +1,19 @@
@font-face { @font-face {
font-family: 'roboto'; font-family: 'roboto-light';
src: url('../fonts/roboto/roboto-light.woff') format('woff'); src: url('../fonts/roboto/roboto-light.woff') format('woff');
font-weight: lighter; font-weight: normal;
font-style: normal; font-style: normal;
} }
@font-face { @font-face {
font-family: 'roboto'; font-family: 'roboto-regular';
src: url('../fonts/roboto/roboto-regular.woff') format('woff'); src: url('../fonts/roboto/roboto-regular.woff') format('woff');
font-weight: normal; font-weight: normal;
font-style: normal; font-style: normal;
} }
@font-face { @font-face {
font-family: 'roboto'; font-family: 'roboto-bold';
src: url('../fonts/roboto/roboto-bold.woff') format('woff'); src: url('../fonts/roboto/roboto-bold.woff') format('woff');
font-weight: bold; font-weight: bold;
font-style: normal; font-style: normal;

View File

@ -1,2 +1,2 @@
$font-family: 'roboto', Arial, Helvetica, sans-serif; $font-family: 'roboto-regular', Arial, Helvetica, sans-serif;
$font-size: 1.05rem; $font-size: 1.05rem;

View File

@ -65,7 +65,6 @@ readers do not read off random characters that represent icons */
.#{$fa-css-prefix}-arrows-alt-h:before { content: fa-content($fa-var-arrows-alt-h); } .#{$fa-css-prefix}-arrows-alt-h:before { content: fa-content($fa-var-arrows-alt-h); }
.#{$fa-css-prefix}-arrows-alt-v:before { content: fa-content($fa-var-arrows-alt-v); } .#{$fa-css-prefix}-arrows-alt-v:before { content: fa-content($fa-var-arrows-alt-v); }
.#{$fa-css-prefix}-artstation:before { content: fa-content($fa-var-artstation); } .#{$fa-css-prefix}-artstation:before { content: fa-content($fa-var-artstation); }
.#{$fa-css-prefix}-arrow-rotate-right:before { content: fa-content($fa-var-arrow-rotate-right); }
.#{$fa-css-prefix}-assistive-listening-systems:before { content: fa-content($fa-var-assistive-listening-systems); } .#{$fa-css-prefix}-assistive-listening-systems:before { content: fa-content($fa-var-assistive-listening-systems); }
.#{$fa-css-prefix}-asterisk:before { content: fa-content($fa-var-asterisk); } .#{$fa-css-prefix}-asterisk:before { content: fa-content($fa-var-asterisk); }
.#{$fa-css-prefix}-asymmetrik:before { content: fa-content($fa-var-asymmetrik); } .#{$fa-css-prefix}-asymmetrik:before { content: fa-content($fa-var-asymmetrik); }

View File

@ -80,7 +80,6 @@ $fa-var-arrow-right: \f061;
$fa-var-arrow-up: \f062; $fa-var-arrow-up: \f062;
$fa-var-arrows-alt: \f0b2; $fa-var-arrows-alt: \f0b2;
$fa-var-arrows-alt-h: \f337; $fa-var-arrows-alt-h: \f337;
$fa-var-arrow-rotate-right: \f01e;
$fa-var-arrows-alt-v: \f338; $fa-var-arrows-alt-v: \f338;
$fa-var-artstation: \f77a; $fa-var-artstation: \f77a;
$fa-var-assistive-listening-systems: \f2a2; $fa-var-assistive-listening-systems: \f2a2;

View File

@ -14,7 +14,7 @@
// Text Label Style // Text Label Style
+ span:not(.lever) { + span:not(.lever) {
position: relative; position: relative;
padding-left: 27px; padding-left: 35px;
cursor: pointer; cursor: pointer;
display: inline-block; display: inline-block;
height: 25px; height: 25px;

View File

@ -17,16 +17,3 @@ html {
visibility: visible; visibility: visible;
opacity: 1; opacity: 1;
} }
.flex-collection-container {
display: flex !important;
align-items: center;
}
.flex-grow {
flex-grow: 1;
}
.help-text > i {
padding-right: 6px;
}

View File

@ -3,7 +3,7 @@
<div class="col s12"> <div class="col s12">
<div class="pagination"> <div class="pagination">
{% for i in paginator.page_range %} {% for i in paginator.page_range %}
<a class="pagenum{% if i == page_obj.number %} currentpage{% endif %}" href="?{% if filter %}filter={{ filter }}&{% endif %}page={{ i }}{% if show_skipped %}&show_skipped=yes{% endif %}{% if only_skipped %}&only_skipped=yes{% endif %}">{{ i }}</a> <a class="pagenum{% if i == page_obj.number %} currentpage{% endif %}" href="?{% if filter %}filter={{ filter }}&{% endif %}page={{ i }}{% if show_skipped %}&show_skipped=yes{% endif %}">{{ i }}</a>
{% endfor %} {% endfor %}
</div> </div>
</div> </div>

View File

@ -1,109 +0,0 @@
from django.forms import MultipleChoiceField, CheckboxSelectMultiple, Field, TypedMultipleChoiceField
from django.db import models
from typing import Any, Optional, Dict
from django.utils.translation import gettext_lazy as _
# this is a form field!
class CustomCheckboxSelectMultiple(CheckboxSelectMultiple):
template_name = 'widgets/checkbox_select.html'
option_template_name = 'widgets/checkbox_option.html'
def get_context(self, name: str, value: Any, attrs) -> Dict[str, Any]:
ctx = super().get_context(name, value, attrs)['widget']
ctx["multipleChoiceProperties"] = []
for _group, options, _index in ctx["optgroups"]:
for option in options:
if not isinstance(value,str) and not isinstance(value,list) and ( option["value"] in value.selected_choices or ( value.allow_all and value.all_choice in value.selected_choices ) ):
checked = True
else:
checked = False
ctx["multipleChoiceProperties"].append({
"template_name": option["template_name"],
"type": option["type"],
"value": option["value"],
"label": option["label"],
"name": option["name"],
"checked": checked})
return { 'widget': ctx }
# this is a database field!
class CommaSepChoiceField(models.Field):
"Implements comma-separated storage of lists"
def __init__(self, separator=",", possible_choices=(("","")), all_choice="", all_label="All", allow_all=False, *args, **kwargs):
self.separator = separator
self.possible_choices = possible_choices
self.selected_choices = []
self.allow_all = allow_all
self.all_label = all_label
self.all_choice = all_choice
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.separator != ",":
kwargs['separator'] = self.separator
kwargs['possible_choices'] = self.possible_choices
return name, path, args, kwargs
def db_type(self, connection):
return 'text'
def get_my_choices(self):
choiceArray = []
if self.possible_choices is None:
return choiceArray
if self.allow_all:
choiceArray.append((self.all_choice, _(self.all_label)))
for t in self.possible_choices:
choiceArray.append(t)
return choiceArray
def formfield(self, **kwargs):
# This is a fairly standard way to set up some defaults
# while letting the caller override them.
defaults = {'form_class': MultipleChoiceField,
'choices': self.get_my_choices,
'widget': CustomCheckboxSelectMultiple,
'label': '',
'required': False}
defaults.update(kwargs)
#del defaults.required
return super().formfield(**defaults)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
# Only include kwarg if it's not the default
if self.separator != ",":
kwargs['separator'] = self.separator
return name, path, args, kwargs
def from_db_value(self, value, expr, conn):
if value is None:
self.selected_choices = []
else:
self.selected_choices = value.split(",")
return self
def get_prep_value(self, value):
if value is None:
return ""
if not isinstance(value,list):
return ""
if self.all_choice not in value:
return ",".join(value)
else:
return self.all_choice
def get_text_for_value(self, val):
fval = [i for i in self.possible_choices if i[0] == val]
if len(fval) <= 0:
return []
else:
return fval[0][1]

View File

@ -1,19 +0,0 @@
from django.core.management.base import BaseCommand
from sync.models import Media
from common.logger import log
class Command(BaseCommand):
help = 'Resets all media item metadata'
def handle(self, *args, **options):
log.info('Resetting all media metadata...')
# Delete all metadata
Media.objects.update(metadata=None)
# Trigger the save signal on each media item
for item in Media.objects.all():
item.save()
log.info('Done')

View File

@ -53,8 +53,6 @@ def get_best_audio_format(media):
# If the format has a video stream, skip it # If the format has a video stream, skip it
if fmt['vcodec'] is not None: if fmt['vcodec'] is not None:
continue continue
if not fmt['acodec']:
continue
audio_formats.append(fmt) audio_formats.append(fmt)
audio_formats = list(reversed(sorted(audio_formats, key=lambda k: k['abr']))) audio_formats = list(reversed(sorted(audio_formats, key=lambda k: k['abr'])))
if not audio_formats: if not audio_formats:
@ -90,8 +88,6 @@ def get_best_video_format(media):
# If the format has an audio stream, skip it # If the format has an audio stream, skip it
if fmt['acodec'] is not None: if fmt['acodec'] is not None:
continue continue
if not fmt['vcodec']:
continue
if media.source.source_resolution.strip().upper() == fmt['format']: if media.source.source_resolution.strip().upper() == fmt['format']:
video_formats.append(fmt) video_formats.append(fmt)
# Check we matched some streams # Check we matched some streams

View File

@ -44,9 +44,7 @@ class PlexMediaServer(MediaServer):
'<p>The <strong>libraries</strong> is a comma-separated list of Plex ' '<p>The <strong>libraries</strong> is a comma-separated list of Plex '
'library or section IDs, you can find out how to get your library or ' 'library or section IDs, you can find out how to get your library or '
'section IDs <a href="https://support.plex.tv/articles/201242707-plex-' 'section IDs <a href="https://support.plex.tv/articles/201242707-plex-'
'media-scanner-via-command-line/#toc-1" target="_blank">here</a> or ' 'media-scanner-via-command-line/#toc-1" target="_blank">here</a>.</p>')
'<a href="https://www.plexopedia.com/plex-media-server/api/server/libraries/" '
'target="_blank">here</a></p>.')
def make_request(self, uri='/', params={}): def make_request(self, uri='/', params={}):
headers = {'User-Agent': 'TubeSync'} headers = {'User-Agent': 'TubeSync'}

View File

@ -1,23 +0,0 @@
# Generated by Django 3.2.17 on 2023-02-13 06:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sync', '0014_alter_media_media_file'),
]
operations = [
migrations.AddField(
model_name='media',
name='manual_skip',
field=models.BooleanField(db_index=True, default=False, help_text='Media marked as "skipped", won\'t be downloaded', verbose_name='manual_skip'),
),
migrations.AlterField(
model_name='media',
name='skip',
field=models.BooleanField(db_index=True, default=False, help_text='INTERNAL FLAG - Media will be skipped and not downloaded', verbose_name='skip'),
),
]

View File

@ -1,34 +0,0 @@
# Generated by Django 3.2.18 on 2023-02-14 20:52
from django.db import migrations, models
import sync.models
class Migration(migrations.Migration):
dependencies = [
('sync', '0015_auto_20230213_0603'),
]
operations = [
migrations.AddField(
model_name='source',
name='embed_metadata',
field=models.BooleanField(default=False, help_text='Embed metadata from source into file', verbose_name='embed metadata'),
),
migrations.AddField(
model_name='source',
name='embed_thumbnail',
field=models.BooleanField(default=False, help_text='Embed thumbnail into the file', verbose_name='embed thumbnail'),
),
migrations.AddField(
model_name='source',
name='enable_sponsorblock',
field=models.BooleanField(default=True, help_text='Use SponsorBlock?', verbose_name='enable sponsorblock'),
),
migrations.AddField(
model_name='source',
name='sponsorblock_categories',
field=sync.models.CommaSepChoiceField(default='all', possible_choices=(('all', 'All'), ('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section'))),
),
]

View File

@ -1,19 +0,0 @@
# Generated by Django 3.2.18 on 2023-02-20 02:23
from django.db import migrations
import sync.fields
class Migration(migrations.Migration):
dependencies = [
('sync', '0016_auto_20230214_2052'),
]
operations = [
migrations.AlterField(
model_name='source',
name='sponsorblock_categories',
field=sync.fields.CommaSepChoiceField(default='all', help_text='Select the sponsorblocks you want to enforce', separator=''),
),
]

View File

@ -1,27 +0,0 @@
# Generated by pac
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sync', '0017_alter_source_sponsorblock_categories'),
]
operations = [
migrations.AddField(
model_name='source',
name='write_subtitles',
field=models.BooleanField(default=False, help_text='Download video subtitles', verbose_name='write subtitles'),
),
migrations.AddField(
model_name='source',
name='auto_subtitles',
field=models.BooleanField(default=False, help_text='Accept auto-generated subtitles', verbose_name='accept auto subtitles'),
),
migrations.AddField(
model_name='source',
name='sub_langs',
field=models.CharField(default='en', help_text='List of subtitles langs to download comma-separated. Example: en,fr',max_length=30),
),
]

View File

@ -1,17 +0,0 @@
# Generated by pac
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sync', '0018_source_subtitles'),
]
operations = [
migrations.AddField(
model_name='source',
name='delete_removed_media',
field=models.BooleanField(default=False, help_text='Delete media that is no longer on this playlist', verbose_name='delete removed media'),
),
]

View File

@ -1,29 +0,0 @@
# Generated by Django 3.2.22 on 2023-10-24 17:25
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sync', '0019_add_delete_removed_media'),
]
operations = [
migrations.AddField(
model_name='source',
name='filter_text',
field=models.CharField(blank=True, default='', help_text='Regex compatible filter string for video titles', max_length=100, verbose_name='filter string'),
),
migrations.AlterField(
model_name='source',
name='auto_subtitles',
field=models.BooleanField(default=False, help_text='Accept auto-generated subtitles', verbose_name='accept auto-generated subs'),
),
migrations.AlterField(
model_name='source',
name='sub_langs',
field=models.CharField(default='en', help_text='List of subtitles langs to download, comma-separated. Example: en,fr or all,-fr,-live_chat', max_length=30, validators=[django.core.validators.RegexValidator(message='Subtitle langs must be a comma-separated list of langs. example: en,fr or all,-fr,-live_chat', regex='^(\\-?[\\_\\.a-zA-Z]+,)*(\\-?[\\_\\.a-zA-Z]+){1}$')], verbose_name='subs langs'),
),
]

View File

@ -1,17 +0,0 @@
# Generated by pac
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sync', '0020_auto_20231024_1825'),
]
operations = [
migrations.AddField(
model_name='source',
name='delete_files_on_disk',
field=models.BooleanField(default=False, help_text='Delete files on disk when they are removed from TubeSync', verbose_name='delete files on disk'),
),
]

View File

@ -1,7 +1,6 @@
import os import os
import uuid import uuid
import json import json
import re
from xml.etree import ElementTree from xml.etree import ElementTree
from collections import OrderedDict from collections import OrderedDict
from datetime import datetime, timedelta from datetime import datetime, timedelta
@ -9,7 +8,6 @@ from pathlib import Path
from django.conf import settings from django.conf import settings
from django.db import models from django.db import models
from django.core.files.storage import FileSystemStorage from django.core.files.storage import FileSystemStorage
from django.core.validators import RegexValidator
from django.utils.text import slugify from django.utils.text import slugify
from django.utils import timezone from django.utils import timezone
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
@ -18,13 +16,14 @@ from common.utils import clean_filename
from .youtube import (get_media_info as get_youtube_media_info, from .youtube import (get_media_info as get_youtube_media_info,
download_media as download_youtube_media) download_media as download_youtube_media)
from .utils import seconds_to_timestr, parse_media_format from .utils import seconds_to_timestr, parse_media_format
from .matching import (get_best_combined_format, get_best_audio_format, from .matching import (get_best_combined_format, get_best_audio_format,
get_best_video_format) get_best_video_format)
from .mediaservers import PlexMediaServer from .mediaservers import PlexMediaServer
from .fields import CommaSepChoiceField
media_file_storage = FileSystemStorage(location=str(settings.DOWNLOAD_ROOT), base_url='/media-data/') media_file_storage = FileSystemStorage(location=str(settings.DOWNLOAD_ROOT), base_url='/media-data/')
class Source(models.Model): class Source(models.Model):
''' '''
A Source is a source of media. Currently, this is either a YouTube channel A Source is a source of media. Currently, this is either a YouTube channel
@ -107,43 +106,6 @@ class Source(models.Model):
EXTENSION_MKV = 'mkv' EXTENSION_MKV = 'mkv'
EXTENSIONS = (EXTENSION_M4A, EXTENSION_OGG, EXTENSION_MKV) EXTENSIONS = (EXTENSION_M4A, EXTENSION_OGG, EXTENSION_MKV)
# as stolen from: https://wiki.sponsor.ajay.app/w/Types / https://github.com/yt-dlp/yt-dlp/blob/master/yt_dlp/postprocessor/sponsorblock.py
SPONSORBLOCK_CATEGORIES_CHOICES = (
('sponsor', 'Sponsor'),
('intro', 'Intermission/Intro Animation'),
('outro', 'Endcards/Credits'),
('selfpromo', 'Unpaid/Self Promotion'),
('preview', 'Preview/Recap'),
('filler', 'Filler Tangent'),
('interaction', 'Interaction Reminder'),
('music_offtopic', 'Non-Music Section'),
)
sponsorblock_categories = CommaSepChoiceField(
_(''),
possible_choices=SPONSORBLOCK_CATEGORIES_CHOICES,
all_choice='all',
allow_all=True,
all_label='(all options)',
default='all',
help_text=_('Select the sponsorblocks you want to enforce')
)
embed_metadata = models.BooleanField(
_('embed metadata'),
default=False,
help_text=_('Embed metadata from source into file')
)
embed_thumbnail = models.BooleanField(
_('embed thumbnail'),
default=False,
help_text=_('Embed thumbnail into the file')
)
enable_sponsorblock = models.BooleanField(
_('enable sponsorblock'),
default=True,
help_text=_('Use SponsorBlock?')
)
# Fontawesome icons used for the source on the front end # Fontawesome icons used for the source on the front end
ICONS = { ICONS = {
SOURCE_TYPE_YOUTUBE_CHANNEL: '<i class="fab fa-youtube"></i>', SOURCE_TYPE_YOUTUBE_CHANNEL: '<i class="fab fa-youtube"></i>',
@ -284,23 +246,6 @@ class Source(models.Model):
help_text=_('If "delete old media" is ticked, the number of days after which ' help_text=_('If "delete old media" is ticked, the number of days after which '
'to automatically delete media') 'to automatically delete media')
) )
filter_text = models.CharField(
_('filter string'),
max_length=100,
default='',
blank=True,
help_text=_('Regex compatible filter string for video titles')
)
delete_removed_media = models.BooleanField(
_('delete removed media'),
default=False,
help_text=_('Delete media that is no longer on this playlist')
)
delete_files_on_disk = models.BooleanField(
_('delete files on disk'),
default=False,
help_text=_('Delete files on disk when they are removed from TubeSync')
)
source_resolution = models.CharField( source_resolution = models.CharField(
_('source resolution'), _('source resolution'),
max_length=8, max_length=8,
@ -364,30 +309,6 @@ class Source(models.Model):
help_text=_('Source has failed to index media') help_text=_('Source has failed to index media')
) )
write_subtitles = models.BooleanField(
_('write subtitles'),
default=False,
help_text=_('Download video subtitles')
)
auto_subtitles = models.BooleanField(
_('accept auto-generated subs'),
default=False,
help_text=_('Accept auto-generated subtitles')
)
sub_langs = models.CharField(
_('subs langs'),
max_length=30,
default='en',
help_text=_('List of subtitles langs to download, comma-separated. Example: en,fr or all,-fr,-live_chat'),
validators=[
RegexValidator(
regex=r"^(\-?[\_\.a-zA-Z]+,)*(\-?[\_\.a-zA-Z]+){1}$",
message=_('Subtitle langs must be a comma-separated list of langs. example: en,fr or all,-fr,-live_chat')
)
]
)
def __str__(self): def __str__(self):
return self.name return self.name
@ -525,7 +446,6 @@ class Source(models.Model):
'dd': now.strftime('%d'), 'dd': now.strftime('%d'),
'source': self.slugname, 'source': self.slugname,
'source_full': self.name, 'source_full': self.name,
'uploader': 'Some Channel Name',
'title': 'some-media-title-name', 'title': 'some-media-title-name',
'title_full': 'Some Media Title Name', 'title_full': 'Some Media Title Name',
'key': 'SoMeUnIqUiD', 'key': 'SoMeUnIqUiD',
@ -547,11 +467,6 @@ class Source(models.Model):
except Exception as e: except Exception as e:
return '' return ''
def is_regex_match(self, media_item_title):
if not self.filter_text:
return True
return bool(re.search(self.filter_text, media_item_title))
def index_media(self): def index_media(self):
''' '''
Index the media source returning a list of media metadata as dicts. Index the media source returning a list of media metadata as dicts.
@ -764,13 +679,7 @@ class Media(models.Model):
_('skip'), _('skip'),
db_index=True, db_index=True,
default=False, default=False,
help_text=_('INTERNAL FLAG - Media will be skipped and not downloaded') help_text=_('Media will be skipped and not downloaded')
)
manual_skip = models.BooleanField(
_('manual_skip'),
db_index=True,
default=False,
help_text=_('Media marked as "skipped", won\'t be downloaded')
) )
downloaded = models.BooleanField( downloaded = models.BooleanField(
_('downloaded'), _('downloaded'),
@ -1040,7 +949,6 @@ class Media(models.Model):
'acodec': display_format['acodec'], 'acodec': display_format['acodec'],
'fps': display_format['fps'], 'fps': display_format['fps'],
'hdr': display_format['hdr'], 'hdr': display_format['hdr'],
'uploader': self.uploader,
} }
@property @property
@ -1105,7 +1013,7 @@ class Media(models.Model):
duration = self.loaded_metadata.get(field, 0) duration = self.loaded_metadata.get(field, 0)
try: try:
duration = int(duration) duration = int(duration)
except (TypeError, ValueError): except ValueError:
duration = 0 duration = 0
return duration return duration
@ -1228,29 +1136,6 @@ class Media(models.Model):
return False return False
return os.path.exists(self.media_file.path) return os.path.exists(self.media_file.path)
@property
def content_type(self):
if not self.downloaded:
return 'video/mp4'
vcodec = self.downloaded_video_codec
if vcodec is None:
acodec = self.downloaded_audio_codec
if acodec is None:
raise TypeError() # nothing here.
acodec = acodec.lower()
if acodec == "mp4a":
return "audio/mp4"
elif acodec == "opus":
return "audio/opus"
else:
# fall-fall-back.
return 'audio/ogg'
vcodec = vcodec.lower()
if vcodec == 'vp9':
return 'video/webm'
else:
return 'video/mp4'
@property @property
def nfoxml(self): def nfoxml(self):
''' '''
@ -1268,22 +1153,6 @@ class Media(models.Model):
showtitle.text = str(self.source.name).strip() showtitle.text = str(self.source.name).strip()
showtitle.tail = '\n ' showtitle.tail = '\n '
nfo.append(showtitle) nfo.append(showtitle)
# season = upload date year
season = nfo.makeelement('season', {})
if self.source.source_type == Source.SOURCE_TYPE_YOUTUBE_PLAYLIST:
# If it's a playlist, set season to 1
season.text = '1'
else:
# If it's not a playlist, set season to upload date year
season.text = str(self.upload_date.year) if self.upload_date else ''
season.tail = '\n '
nfo.append(season)
# episode = number of video in the year
episode = nfo.makeelement('episode', {})
episode_number = self.calculate_episode_number()
episode.text = str(episode_number) if episode_number else ''
episode.tail = '\n '
nfo.append(episode)
# ratings = media metadata youtube rating # ratings = media metadata youtube rating
value = nfo.makeelement('value', {}) value = nfo.makeelement('value', {})
value.text = str(self.rating) value.text = str(self.rating)
@ -1391,10 +1260,7 @@ class Media(models.Model):
f'no valid format available') f'no valid format available')
# Download the media with youtube-dl # Download the media with youtube-dl
download_youtube_media(self.url, format_str, self.source.extension, download_youtube_media(self.url, format_str, self.source.extension,
str(self.filepath), self.source.write_json, str(self.filepath), self.source.write_json)
self.source.sponsorblock_categories.selected_choices, self.source.embed_thumbnail,
self.source.embed_metadata, self.source.enable_sponsorblock,
self.source.write_subtitles, self.source.auto_subtitles,self.source.sub_langs )
# Return the download paramaters # Return the download paramaters
return format_str, self.source.extension return format_str, self.source.extension
@ -1408,19 +1274,6 @@ class Media(models.Model):
f'has no indexer') f'has no indexer')
return indexer(self.url) return indexer(self.url)
def calculate_episode_number(self):
if self.source.source_type == Source.SOURCE_TYPE_YOUTUBE_PLAYLIST:
sorted_media = Media.objects.filter(source=self.source)
else:
self_year = self.upload_date.year if self.upload_date else self.created.year
filtered_media = Media.objects.filter(source=self.source, published__year=self_year)
sorted_media = sorted(filtered_media, key=lambda x: (x.upload_date, x.key))
position_counter = 1
for media in sorted_media:
if media == self:
return position_counter
position_counter += 1
class MediaServer(models.Model): class MediaServer(models.Model):
''' '''

View File

@ -1,5 +1,4 @@
import os import os
import glob
from django.conf import settings from django.conf import settings
from django.db.models.signals import pre_save, post_save, pre_delete, post_delete from django.db.models.signals import pre_save, post_save, pre_delete, post_delete
from django.dispatch import receiver from django.dispatch import receiver
@ -75,7 +74,6 @@ def source_pre_delete(sender, instance, **kwargs):
media.delete() media.delete()
@receiver(post_delete, sender=Source) @receiver(post_delete, sender=Source)
def source_post_delete(sender, instance, **kwargs): def source_post_delete(sender, instance, **kwargs):
# Triggered after a source is deleted # Triggered after a source is deleted
@ -95,17 +93,13 @@ def task_task_failed(sender, task_id, completed_task, **kwargs):
@receiver(post_save, sender=Media) @receiver(post_save, sender=Media)
def media_post_save(sender, instance, created, **kwargs): def media_post_save(sender, instance, created, **kwargs):
# If the media is skipped manually, bail.
if instance.manual_skip:
return
# Triggered after media is saved # Triggered after media is saved
cap_changed = False cap_changed = False
can_download_changed = False can_download_changed = False
# Reset the skip flag if the download cap has changed if the media has not # Reset the skip flag if the download cap has changed if the media has not
# already been downloaded # already been downloaded
if not instance.downloaded and instance.metadata: if not instance.downloaded:
max_cap_age = instance.source.download_cap_date max_cap_age = instance.source.download_cap_date
filter_text = instance.source.filter_text.strip()
published = instance.published published = instance.published
if not published: if not published:
if not instance.skip: if not instance.skip:
@ -119,20 +113,11 @@ def media_post_save(sender, instance, created, **kwargs):
else: else:
if max_cap_age: if max_cap_age:
if published > max_cap_age and instance.skip: if published > max_cap_age and instance.skip:
if filter_text: # Media was published after the cap date but is set to be skipped
if instance.source.is_regex_match(instance.title): log.info(f'Media: {instance.source} / {instance} has a valid '
log.info(f'Media: {instance.source} / {instance} has a valid ' f'publishing date, marking to be unskipped')
f'publishing date and title filter, marking to be unskipped') instance.skip = False
instance.skip = False cap_changed = True
cap_changed = True
else:
log.debug(f'Media: {instance.source} / {instance} has a valid publishing date '
f'but failed the title filter match, already marked skipped')
else:
log.info(f'Media: {instance.source} / {instance} has a valid '
f'publishing date, marking to be unskipped')
instance.skip = False
cap_changed = True
elif published <= max_cap_age and not instance.skip: elif published <= max_cap_age and not instance.skip:
log.info(f'Media: {instance.source} / {instance} is too old for ' log.info(f'Media: {instance.source} / {instance} is too old for '
f'the download cap date, marking to be skipped') f'the download cap date, marking to be skipped')
@ -141,20 +126,10 @@ def media_post_save(sender, instance, created, **kwargs):
else: else:
if instance.skip: if instance.skip:
# Media marked to be skipped but source download cap removed # Media marked to be skipped but source download cap removed
if filter_text: log.info(f'Media: {instance.source} / {instance} has a valid '
if instance.source.is_regex_match(instance.title): f'publishing date, marking to be unskipped')
log.info(f'Media: {instance.source} / {instance} has a valid ' instance.skip = False
f'publishing date and title filter, marking to be unskipped') cap_changed = True
instance.skip = False
cap_changed = True
else:
log.info(f'Media: {instance.source} / {instance} has a valid publishing date '
f'but failed the title filter match, already marked skipped')
else:
log.debug(f'Media: {instance.source} / {instance} has a valid publishing date and '
f'is already marked as not to be skipped')
cap_changed = False
# Recalculate the "can_download" flag, this may # Recalculate the "can_download" flag, this may
# need to change if the source specifications have been changed # need to change if the source specifications have been changed
if instance.metadata: if instance.metadata:
@ -177,7 +152,7 @@ def media_post_save(sender, instance, created, **kwargs):
verbose_name = _('Downloading metadata for "{}"') verbose_name = _('Downloading metadata for "{}"')
download_media_metadata( download_media_metadata(
str(instance.pk), str(instance.pk),
priority=5, priority=10,
verbose_name=verbose_name.format(instance.pk), verbose_name=verbose_name.format(instance.pk),
remove_existing_tasks=True remove_existing_tasks=True
) )
@ -224,16 +199,6 @@ def media_pre_delete(sender, instance, **kwargs):
if thumbnail_url: if thumbnail_url:
delete_task_by_media('sync.tasks.download_media_thumbnail', delete_task_by_media('sync.tasks.download_media_thumbnail',
(str(instance.pk), thumbnail_url)) (str(instance.pk), thumbnail_url))
if instance.source.delete_files_on_disk and (instance.media_file or instance.thumb):
# Delete all media files if it contains filename
filepath = instance.media_file.path if instance.media_file else instance.thumb.path
barefilepath, fileext = os.path.splitext(filepath)
# Get all files that start with the bare file path
all_related_files = glob.glob(f'{barefilepath}.*')
for file in all_related_files:
log.info(f'Deleting file for: {instance} path: {file}')
delete_file(file)
@receiver(post_delete, sender=Media) @receiver(post_delete, sender=Media)

View File

@ -132,23 +132,16 @@ def cleanup_completed_tasks():
def cleanup_old_media(): def cleanup_old_media():
for source in Source.objects.filter(delete_old_media=True, days_to_keep__gt=0): for media in Media.objects.filter(download_date__isnull=False):
delta = timezone.now() - timedelta(days=source.days_to_keep) if media.source.delete_old_media and media.source.days_to_keep > 0:
for media in source.media_source.filter(downloaded=True, download_date__lt=delta): delta = timezone.now() - timedelta(days=media.source.days_to_keep)
log.info(f'Deleting expired media: {source} / {media} ' if media.downloaded and media.download_date < delta:
f'(now older than {source.days_to_keep} days / ' # Media was downloaded after the cutoff date, delete it
f'download_date before {delta})') log.info(f'Deleting expired media: {media.source} / {media} '
# .delete() also triggers a pre_delete signal that removes the files f'(now older than {media.source.days_to_keep} days / '
media.delete() f'download_date before {delta})')
# .delete() also triggers a pre_delete signal that removes the files
media.delete()
def cleanup_removed_media(source, videos):
media_objects = Media.objects.filter(source=source, downloaded=True)
for item in media_objects:
matching_source_item = [video['id'] for video in videos if video['id'] == item.key]
if not matching_source_item:
log.info(f'{item.title} is no longer in source, removing')
item.delete()
@background(schedule=0) @background(schedule=0)
@ -160,6 +153,7 @@ def index_source_task(source_id):
source = Source.objects.get(pk=source_id) source = Source.objects.get(pk=source_id)
except Source.DoesNotExist: except Source.DoesNotExist:
# Task triggered but the Source has been deleted, delete the task # Task triggered but the Source has been deleted, delete the task
delete_index_source_task(source_id)
return return
# Reset any errors # Reset any errors
source.has_failed = False source.has_failed = False
@ -195,9 +189,6 @@ def index_source_task(source_id):
cleanup_completed_tasks() cleanup_completed_tasks()
# Tack on a cleanup of old media # Tack on a cleanup of old media
cleanup_old_media() cleanup_old_media()
if source.delete_removed_media:
log.info(f'Cleaning up media no longer in source {source}')
cleanup_removed_media(source, videos)
@background(schedule=0) @background(schedule=0)
@ -211,6 +202,7 @@ def check_source_directory_exists(source_id):
source = Source.objects.get(pk=source_id) source = Source.objects.get(pk=source_id)
except Source.DoesNotExist: except Source.DoesNotExist:
# Task triggered but the Source has been deleted, delete the task # Task triggered but the Source has been deleted, delete the task
delete_index_source_task(source_id)
return return
# Check the source output directory exists # Check the source output directory exists
if not source.directory_exists(): if not source.directory_exists():
@ -231,9 +223,6 @@ def download_media_metadata(media_id):
log.error(f'Task download_media_metadata(pk={media_id}) called but no ' log.error(f'Task download_media_metadata(pk={media_id}) called but no '
f'media exists with ID: {media_id}') f'media exists with ID: {media_id}')
return return
if media.manual_skip:
log.info(f'Task for ID: {media_id} skipped, due to task being manually skipped.')
return
source = media.source source = media.source
metadata = media.index_metadata() metadata = media.index_metadata()
media.metadata = json.dumps(metadata, default=json_serial) media.metadata = json.dumps(metadata, default=json_serial)
@ -252,11 +241,6 @@ def download_media_metadata(media_id):
log.warn(f'Media: {source} / {media} is older than cap age ' log.warn(f'Media: {source} / {media} is older than cap age '
f'{max_cap_age}, skipping') f'{max_cap_age}, skipping')
media.skip = True media.skip = True
# If the source has a search filter, check the video title matches the filter
if source.filter_text and not source.is_regex_match(media.title):
# Filter text not found in the media title. Accepts regex string, blank search filter results in this returning false
log.warn(f'Media: {source} / {media} does not match {source.filter_text}, skipping')
media.skip = True
# If the source has a cut-off check the upload date is within the allowed delta # If the source has a cut-off check the upload date is within the allowed delta
if source.delete_old_media and source.days_to_keep > 0: if source.delete_old_media and source.days_to_keep > 0:
if not isinstance(media.published, datetime): if not isinstance(media.published, datetime):

View File

@ -43,11 +43,6 @@
<td>Full source name</td> <td>Full source name</td>
<td>My Source</td> <td>My Source</td>
</tr> </tr>
<tr>
<td>{uploader}</td>
<td>Uploader name</td>
<td>Some Channel Name</td>
</tr>
<tr> <tr>
<td>{title}</td> <td>{title}</td>
<td>Lower case media title, max 80 chars</td> <td>Lower case media title, max 80 chars</td>

View File

@ -10,23 +10,15 @@
<p class="truncate"><strong><a href="{{ media.url }}" target="_blank"><i class="fas fa-link"></i> {{ media.url }}</a></strong></p> <p class="truncate"><strong><a href="{{ media.url }}" target="_blank"><i class="fas fa-link"></i> {{ media.url }}</a></strong></p>
<p class="truncate">Downloading to: <strong>{{ media.source.directory_path }}</strong></p> <p class="truncate">Downloading to: <strong>{{ media.source.directory_path }}</strong></p>
{% if download_state == 'downloaded' %} {% if download_state == 'downloaded' %}
{% if media.source.is_audio %}
<audio controls src="{% url 'sync:media-content' pk=media.pk %}"></audio>
{% else %}
<video controls style="width: 100%"> <video controls style="width: 100%">
<source src="{% url 'sync:media-content' pk=media.pk %}"> <source src="{% url 'sync:media-content' pk=media.pk %}">
</video> </video>
{% endif %} <p class="truncate"><a href="{% url 'sync:media-content' pk=media.pk %}" download="{{ media.filename }}">Download</a></p>
<p class="truncate"><a href="{% url 'sync:media-content' pk=media.pk %}" download="{{ media.filename }}"><strong><i class="fas fa-download"></i> Download</strong></a></p>
{% endif %} {% endif %}
</div> </div>
</div> </div>
{% if media.manual_skip %}{% include 'errorbox.html' with message='Media is marked to be skipped and will not be downloaded.' %} {% if not media.can_download %}{% include 'errorbox.html' with message='Media cannot be downloaded because it has no formats which match the source requirements.' %}{% endif %}
{% else %} {% if media.skip %}{% include 'errorbox.html' with message='Media is marked to be skipped and will not be downloaded.' %}{% endif %}
{% if not media.can_download %}{% include 'errorbox.html' with message='Media cannot be downloaded because it has no formats which match the source requirements.' %}{% endif %}
{% if media.skip %}{% include 'errorbox.html' with message='This media may be skipped due to error(s).' %}{% endif %}
{% endif %}
{% include 'infobox.html' with message=message %} {% include 'infobox.html' with message=message %}
<div class="row"> <div class="row">
<div class="col s12 m7"> <div class="col s12 m7">
@ -170,10 +162,10 @@
{% else %} {% else %}
<div class="row"> <div class="row">
<div class="col s12"> <div class="col s12">
{% if media.manual_skip %} {% if media.skip %}
<a href="{% url 'sync:enable-media' pk=media.pk %}" class="btn">Unskip media (manually) <i class="fas fa-cloud-download-alt"></i></a> <a href="{% url 'sync:enable-media' pk=media.pk %}" class="btn">Enable (unskip) media <i class="fas fa-cloud-download-alt"></i></a>
{% else %} {% else %}
<a href="{% url 'sync:skip-media' pk=media.pk %}" class="btn delete-button">Manually mark media to be skipped <i class="fas fa-times-circle"></i></a> <a href="{% url 'sync:skip-media' pk=media.pk %}" class="btn delete-button">Skip media <i class="fas fa-times-circle"></i></a>
{% endif %} {% endif %}
</div> </div>
</div> </div>

View File

@ -36,10 +36,8 @@
{% if m.downloaded %} {% if m.downloaded %}
<i class="fas fa-check-circle" title="Downloaded"></i> {{ m.download_date|date:'Y-m-d' }} <i class="fas fa-check-circle" title="Downloaded"></i> {{ m.download_date|date:'Y-m-d' }}
{% else %} {% else %}
{% if m.manual_skip %} {% if m.skip %}
<span class="error-text"><i class="fas fa-times" title="Skipping media"></i> Manually skipped</span> <span class="error-text"><i class="fas fa-times" title="Skipping media"></i> Skipped</span>
{% elif m.skip %}
<span class="error-text"><i class="fas fa-times" title="Skipping media"></i> Skipped by system</span>
{% elif not m.source.download_media %} {% elif not m.source.download_media %}
<span class="error-text"><i class="fas fa-times" title="Not downloading media for this source"></i> Disabled at source</span> <span class="error-text"><i class="fas fa-times" title="Not downloading media for this source"></i> Disabled at source</span>
{% elif not m.has_metadata %} {% elif not m.has_metadata %}
@ -64,5 +62,5 @@
</div> </div>
{% endfor %} {% endfor %}
</div> </div>
{% include 'pagination.html' with pagination=sources.paginator filter=source.pk show_skipped=show_skipped only_skipped=only_skipped%} {% include 'pagination.html' with pagination=sources.paginator filter=source.pk show_skipped=show_skipped %}
{% endblock %} {% endblock %}

View File

@ -9,8 +9,8 @@
<p> <p>
Are you sure you want to delete this source? Deleting a source is permanent. Are you sure you want to delete this source? Deleting a source is permanent.
By default, deleting a source does not delete any saved media files. You can By default, deleting a source does not delete any saved media files. You can
<strong>tick the &quot;also delete downloaded media&quot; checkbox to also remove directory {{ source.directory_path }} tick the &quot;also delete downloaded media&quot; checkbox to also remove saved
</strong>when you delete the source. Deleting a source cannot be undone. media when you delete the source. Deleting a source cannot be undone.
</p> </p>
</div> </div>
</div> </div>

View File

@ -43,10 +43,6 @@
<td class="hide-on-small-only">Directory</td> <td class="hide-on-small-only">Directory</td>
<td><span class="hide-on-med-and-up">Directory<br></span><strong>{{ source.directory }}</strong></td> <td><span class="hide-on-med-and-up">Directory<br></span><strong>{{ source.directory }}</strong></td>
</tr> </tr>
<tr title="Filter text">
<td class="hide-on-small-only">Filter text</td>
<td><span class="hide-on-med-and-up">Filter text<br></span><strong>{{ source.filter_text }}</strong></td>
</tr>
<tr title="Media file name format to use for saving files"> <tr title="Media file name format to use for saving files">
<td class="hide-on-small-only">Media format</td> <td class="hide-on-small-only">Media format</td>
<td><span class="hide-on-med-and-up">Media format<br></span><strong>{{ source.media_format }}</strong></td> <td><span class="hide-on-med-and-up">Media format<br></span><strong>{{ source.media_format }}</strong></td>
@ -119,14 +115,6 @@
<td class="hide-on-small-only">Write JSON?</td> <td class="hide-on-small-only">Write JSON?</td>
<td><span class="hide-on-med-and-up">Write JSON?<br></span><strong>{% if source.write_json %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td> <td><span class="hide-on-med-and-up">Write JSON?<br></span><strong>{% if source.write_json %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
</tr> </tr>
<tr title="Delete media that is no longer on this playlist?">
<td class="hide-on-small-only">Delete removed media</td>
<td><span class="hide-on-med-and-up">Delete removed media<br></span><strong>{% if source.delete_removed_media %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
</tr>
<tr title="Delete files on disk when they are removed from TubeSync?">
<td class="hide-on-small-only">Delete files on disk</td>
<td><span class="hide-on-med-and-up">Delete files on disk<br></span><strong>{% if source.delete_files_on_disk %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
</tr>
{% if source.delete_old_media and source.days_to_keep > 0 %} {% if source.delete_old_media and source.days_to_keep > 0 %}
<tr title="Days after which your media from this source will be locally deleted"> <tr title="Days after which your media from this source will be locally deleted">
<td class="hide-on-small-only">Delete old media</td> <td class="hide-on-small-only">Delete old media</td>
@ -142,55 +130,6 @@
<td class="hide-on-small-only">UUID</td> <td class="hide-on-small-only">UUID</td>
<td><span class="hide-on-med-and-up">UUID<br></span><strong>{{ source.uuid }}</strong></td> <td><span class="hide-on-med-and-up">UUID<br></span><strong>{{ source.uuid }}</strong></td>
</tr> </tr>
<tr title="{{ _('Embedding thumbnail?') }}">
<td class="hide-on-small-only">{{ _("Embed thumbnail?") }}:</td>
<td><span class="hide-on-med-and-up">{{ _("Embed thumbnail?") }}<br></span><strong><i class="fas {% if source.embed_thumbnail %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
</tr>
<tr title="{{ _('Embedding metadata?') }}">
<td class="hide-on-small-only">{{ _("Embed metadata?") }}:</td>
<td><span class="hide-on-med-and-up">{{ _("Embed metadata?") }}<br></span><strong><i class="fas {% if source.embed_metadata %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
</tr>
<tr title="{{ _('Is sponsorblock enabled?') }}">
<td class="hide-on-small-only">{{ _("SponsorBlock?") }}:</td>
<td><span class="hide-on-med-and-up">{{ _("Sponsorblock enabled?") }}<br></span><strong><i class="fas {% if source.enable_sponsorblock %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
</tr>
{% if source.enable_sponsorblock %}
<tr title="{{ _('SponsorBlock: What to block?') }}">
<td class="hide-on-small-only">{{ _("What blocked?") }}:</td>
<td><span class="hide-on-med-and-up">{{ _("What blocked?") }}<br></span><strong>
{% if source.sponsorblock_categories.all_choice in source.sponsorblock_categories.selected_choices %}
{% for k,v in source.sponsorblock_categories.possible_choices %}
{{ v }}: <i class="fas fa-check"></i><BR>
{% endfor %}
{% else %}
{% for c in source.sponsorblock_categories.selected_choices %}
{% for k,v in source.sponsorblock_categories.possible_choices %}
{% if k == c %} {{ v }}: <i class="fas fa-check"></i><BR>{% endif %}
{% endfor %}
{% endfor %}
{% endif %}
</strong></td>
</tr>
{% endif %}
<tr title="{{ _('Are Subtitles downloaded?') }}">
<td class="hide-on-small-only">{{ _("Download subtitles?") }}:</td>
<td><span class="hide-on-med-and-up">{{ _("Download subtitles?") }}:</span><strong><i class="fas {% if source.write_subtitles %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
</tr>
{% if source.write_subtitles %}
<tr title="{{ _('Are auto subs accepted?') }}">
<td class="hide-on-small-only">{{ _("Auto-generated subtitles?") }}:</td>
<td><span class="hide-on-med-and-up">{{ _("Auto-generated subtitles?") }}:</span><strong><i class="fas {% if source.auto_subtitles %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
</tr>
<tr title="{{ _('Subs langs?') }}">
<td class="hide-on-small-only">{{ _("Subs langs?") }}:</td>
<td><span class="hide-on-med-and-up">{{ _("Subs langs?") }}:</span><strong>{{source.sub_langs}}</strong></td>
</tr>
{% endif %}
</table> </table>
</div> </div>
</div> </div>

View File

@ -24,18 +24,15 @@
<div class="col s12"> <div class="col s12">
<div class="collection"> <div class="collection">
{% for source in sources %} {% for source in sources %}
<span class="collection-item flex-collection-container"> <a href="{% url 'sync:source' pk=source.pk %}" class="collection-item">
<a href="{% url 'sync:source' pk=source.pk %}" class="flex-grow"> {{ source.icon|safe }} <strong>{{ source.name }}</strong> ({{ source.get_source_type_display }} &quot;{{ source.key }}&quot;)<br>
{{ source.icon|safe }} <strong>{{ source.name }}</strong> ({{ source.get_source_type_display }} &quot;{{ source.key }}&quot;)<br> {{ source.format_summary }}<br>
{{ source.format_summary }}<br> {% if source.has_failed %}
{% if source.has_failed %} <span class="error-text"><i class="fas fa-exclamation-triangle"></i> <strong>Source has permanent failures</strong></span>
<span class="error-text"><i class="fas fa-exclamation-triangle"></i> <strong>Source has permanent failures</strong></span> {% else %}
{% else %} <strong>{{ source.media_count }}</strong> media items, <strong>{{ source.downloaded_count }}</strong> downloaded{% if source.delete_old_media and source.days_to_keep > 0 %}, keeping {{ source.days_to_keep }} days of media{% endif %}
<strong>{{ source.media_count }}</strong> media items, <strong>{{ source.downloaded_count }}</strong> downloaded{% if source.delete_old_media and source.days_to_keep > 0 %}, keeping {{ source.days_to_keep }} days of media{% endif %} {% endif %}
{% endif %} </a>
</a>
<a href="{% url 'sync:source-sync-now' pk=source.pk %}" class="collection-item"><i class="fas fa-arrow-rotate-right"></i></a>
</span>
{% empty %} {% empty %}
<span class="collection-item no-items"><i class="fas fa-info-circle"></i> You haven't added any sources.</span> <span class="collection-item no-items"><i class="fas fa-info-circle"></i> You haven't added any sources.</span>
{% endfor %} {% endfor %}

View File

@ -66,7 +66,7 @@
{% for task in scheduled %} {% for task in scheduled %}
<a href="{% url task.url pk=task.instance.pk %}" class="collection-item"> <a href="{% url task.url pk=task.instance.pk %}" class="collection-item">
<i class="fas fa-stopwatch"></i> <strong>{{ task }}</strong><br> <i class="fas fa-stopwatch"></i> <strong>{{ task }}</strong><br>
{% if task.instance.index_schedule and task.repeat > 0 %}Scheduled to run {{ task.instance.get_index_schedule_display|lower }}.<br>{% endif %} {% if task.instance.index_schedule %}Scheduled to run {{ task.instance.get_index_schedule_display|lower }}.<br>{% endif %}
<i class="fas fa-redo"></i> Task will run {% if task.run_now %}<strong>immediately</strong>{% else %}at <strong>{{ task.run_at|date:'Y-m-d H:i:s' }}</strong>{% endif %} <i class="fas fa-redo"></i> Task will run {% if task.run_now %}<strong>immediately</strong>{% else %}at <strong>{{ task.run_at|date:'Y-m-d H:i:s' }}</strong>{% endif %}
</a> </a>
{% empty %} {% empty %}

View File

@ -1,7 +0,0 @@
<!--<input type="{{ option.type }}" name="{{ option.name }}" value="{{ option.value }}" id="{{ option.value }}"><BR>
<label for="{{ option.value }}">{{option.label}}</label>-->
<label>
<input type="{{ option.type }}" name="{{ option.name }}" value="{{ option.value }}" id="{{ option.value }}" {% if option.checked %}checked{% endif %}>
<span>{{option.label}}</span>
</label>

View File

@ -1,5 +0,0 @@
</label>
{% for option in widget.multipleChoiceProperties %}
{% include option.template_name with option=option %}
{% endfor %}
<label>

File diff suppressed because it is too large

View File

@ -6,7 +6,7 @@
import logging import logging
from datetime import datetime, timedelta from datetime import datetime
from urllib.parse import urlsplit from urllib.parse import urlsplit
from xml.etree import ElementTree from xml.etree import ElementTree
from django.conf import settings from django.conf import settings
@ -14,7 +14,6 @@ from django.test import TestCase, Client
from django.utils import timezone from django.utils import timezone
from background_task.models import Task from background_task.models import Task
from .models import Source, Media from .models import Source, Media
from .tasks import cleanup_old_media
class FrontEndTestCase(TestCase): class FrontEndTestCase(TestCase):
@ -175,7 +174,6 @@ class FrontEndTestCase(TestCase):
'directory': 'testdirectory', 'directory': 'testdirectory',
'media_format': settings.MEDIA_FORMATSTR_DEFAULT, 'media_format': settings.MEDIA_FORMATSTR_DEFAULT,
'download_cap': 0, 'download_cap': 0,
'filter_text':'.*',
'index_schedule': 3600, 'index_schedule': 3600,
'delete_old_media': False, 'delete_old_media': False,
'days_to_keep': 14, 'days_to_keep': 14,
@ -184,8 +182,7 @@ class FrontEndTestCase(TestCase):
'source_acodec': 'OPUS', 'source_acodec': 'OPUS',
'prefer_60fps': False, 'prefer_60fps': False,
'prefer_hdr': False, 'prefer_hdr': False,
'fallback': 'f', 'fallback': 'f'
'sub_langs': 'en',
} }
response = c.post('/source-add', data) response = c.post('/source-add', data)
self.assertEqual(response.status_code, 302) self.assertEqual(response.status_code, 302)
@ -218,7 +215,6 @@ class FrontEndTestCase(TestCase):
'directory': 'testdirectory', 'directory': 'testdirectory',
'media_format': settings.MEDIA_FORMATSTR_DEFAULT, 'media_format': settings.MEDIA_FORMATSTR_DEFAULT,
'download_cap': 0, 'download_cap': 0,
'filter_text':'.*',
'index_schedule': Source.IndexSchedule.EVERY_HOUR, 'index_schedule': Source.IndexSchedule.EVERY_HOUR,
'delete_old_media': False, 'delete_old_media': False,
'days_to_keep': 14, 'days_to_keep': 14,
@ -227,8 +223,7 @@ class FrontEndTestCase(TestCase):
'source_acodec': Source.SOURCE_ACODEC_OPUS, 'source_acodec': Source.SOURCE_ACODEC_OPUS,
'prefer_60fps': False, 'prefer_60fps': False,
'prefer_hdr': False, 'prefer_hdr': False,
'fallback': Source.FALLBACK_FAIL, 'fallback': Source.FALLBACK_FAIL
'sub_langs': 'en',
} }
response = c.post(f'/source-update/{source_uuid}', data) response = c.post(f'/source-update/{source_uuid}', data)
self.assertEqual(response.status_code, 302) self.assertEqual(response.status_code, 302)
@ -249,7 +244,6 @@ class FrontEndTestCase(TestCase):
'directory': 'testdirectory', 'directory': 'testdirectory',
'media_format': settings.MEDIA_FORMATSTR_DEFAULT, 'media_format': settings.MEDIA_FORMATSTR_DEFAULT,
'download_cap': 0, 'download_cap': 0,
'filter_text':'.*',
'index_schedule': Source.IndexSchedule.EVERY_2_HOURS, # changed 'index_schedule': Source.IndexSchedule.EVERY_2_HOURS, # changed
'delete_old_media': False, 'delete_old_media': False,
'days_to_keep': 14, 'days_to_keep': 14,
@ -258,8 +252,7 @@ class FrontEndTestCase(TestCase):
'source_acodec': Source.SOURCE_ACODEC_OPUS, 'source_acodec': Source.SOURCE_ACODEC_OPUS,
'prefer_60fps': False, 'prefer_60fps': False,
'prefer_hdr': False, 'prefer_hdr': False,
'fallback': Source.FALLBACK_FAIL, 'fallback': Source.FALLBACK_FAIL
'sub_langs': 'en',
} }
response = c.post(f'/source-update/{source_uuid}', data) response = c.post(f'/source-update/{source_uuid}', data)
self.assertEqual(response.status_code, 302) self.assertEqual(response.status_code, 302)
@ -471,14 +464,11 @@ metadata_60fps_filepath = settings.BASE_DIR / 'sync' / 'testdata' / 'metadata_60
metadata_60fps = open(metadata_60fps_filepath, 'rt').read() metadata_60fps = open(metadata_60fps_filepath, 'rt').read()
metadata_60fps_hdr_filepath = settings.BASE_DIR / 'sync' / 'testdata' / 'metadata_60fps_hdr.json' metadata_60fps_hdr_filepath = settings.BASE_DIR / 'sync' / 'testdata' / 'metadata_60fps_hdr.json'
metadata_60fps_hdr = open(metadata_60fps_hdr_filepath, 'rt').read() metadata_60fps_hdr = open(metadata_60fps_hdr_filepath, 'rt').read()
metadata_20230629_filepath = settings.BASE_DIR / 'sync' / 'testdata' / 'metadata_2023-06-29.json'
metadata_20230629 = open(metadata_20230629_filepath, 'rt').read()
all_test_metadata = { all_test_metadata = {
'boring': metadata, 'boring': metadata,
'hdr': metadata_hdr, 'hdr': metadata_hdr,
'60fps': metadata_60fps, '60fps': metadata_60fps,
'60fps+hdr': metadata_60fps_hdr, '60fps+hdr': metadata_60fps_hdr,
'20230629': metadata_20230629,
} }
@ -661,8 +651,6 @@ class MediaTestCase(TestCase):
'<episodedetails>', '<episodedetails>',
' <title>no fancy stuff title</title>', ' <title>no fancy stuff title</title>',
' <showtitle>testname</showtitle>', ' <showtitle>testname</showtitle>',
' <season>2017</season>',
' <episode></episode>',
' <ratings>', ' <ratings>',
' <rating default="True" max="5" name="youtube">', ' <rating default="True" max="5" name="youtube">',
' <value>1.2345</value>', ' <value>1.2345</value>',
@ -1409,118 +1397,3 @@ class FormatMatchingTestCase(TestCase):
match_type, format_code = self.media.get_best_video_format() match_type, format_code = self.media.get_best_video_format()
self.assertEqual(format_code, expected_format_code) self.assertEqual(format_code, expected_format_code)
self.assertEqual(match_type, expeceted_match_type) self.assertEqual(match_type, expeceted_match_type)
def test_metadata_20230629(self):
self.media.metadata = all_test_metadata['20230629']
expected_matches = {
# (format, vcodec, prefer_60fps, prefer_hdr): (match_type, code),
('360p', 'AVC1', False, True): (False, '134'), # Fallback match, no hdr
('360p', 'AVC1', True, False): (False, '134'), # Fallback match, no 60fps
('360p', 'AVC1', True, True): (False, '332'), # Fallback match, 60fps+hdr, switched to VP9
('360p', 'VP9', False, False): (True, '243'), # Exact match
('360p', 'VP9', False, True): (True, '332'), # Exact match, hdr
('360p', 'VP9', True, False): (False, '332'), # Fallback match, 60fps, extra hdr
('360p', 'VP9', True, True): (True, '332'), # Exact match, 60fps+hdr
('480p', 'AVC1', False, False): (True, '135'), # Exact match
('480p', 'AVC1', False, True): (False, '135'), # Fallback match, no hdr
('480p', 'AVC1', True, False): (False, '135'), # Fallback match, no 60fps
('480p', 'AVC1', True, True): (False, '333'), # Fallback match, 60fps+hdr, switched to VP9
('480p', 'VP9', False, False): (True, '244'), # Exact match
('480p', 'VP9', False, True): (True, '333'), # Exact match, hdr
('480p', 'VP9', True, False): (False, '333'), # Fallback match, 60fps, extra hdr
('480p', 'VP9', True, True): (True, '333'), # Exact match, 60fps+hdr
('720p', 'AVC1', False, False): (True, '136'), # Exact match
('720p', 'AVC1', False, True): (False, '136'), # Fallback match, no hdr
('720p', 'AVC1', True, False): (True, '298'), # Exact match, 60fps
('720p', 'AVC1', True, True): (False, '334'), # Fallback match, 60fps+hdr, switched to VP9
('720p', 'VP9', False, False): (True, '247'), # Exact match
('720p', 'VP9', False, True): (True, '334'), # Exact match, hdr
('720p', 'VP9', True, False): (True, '302'), # Exact match, 60fps
('720p', 'VP9', True, True): (True, '334'), # Exact match, 60fps+hdr
('1440p', 'AVC1', False, False): (False, '308'), # Fallback match, 60fps, switched to VP9 (no 1440p AVC1)
('1440p', 'AVC1', False, True): (False, '336'), # Fallback match, 60fps+hdr, switched to VP9 (no 1440p AVC1)
('1440p', 'AVC1', True, False): (False, '308'), # Fallback match, 60fps, switched to VP9 (no 1440p AVC1)
('1440p', 'AVC1', True, True): (False, '336'), # Fallback match, 60fps+hdr, switched to VP9 (no 1440p AVC1)
('1440p', 'VP9', False, False): (False, '308'), # Fallback, 60fps
('1440p', 'VP9', False, True): (True, '336'), # Exact match, hdr
('1440p', 'VP9', True, False): (True, '308'), # Exact match, 60fps
('1440p', 'VP9', True, True): (True, '336'), # Exact match, 60fps+hdr
('2160p', 'AVC1', False, False): (False, '315'), # Fallback, 60fps, switched to VP9 (no 2160p AVC1)
('2160p', 'AVC1', False, True): (False, '337'), # Fallback match, 60fps+hdr, switched to VP9 (no 2160p AVC1)
('2160p', 'AVC1', True, False): (False, '315'), # Fallback, switched to VP9 (no 2160p AVC1)
('2160p', 'AVC1', True, True): (False, '337'), # Fallback match, 60fps+hdr, switched to VP9 (no 2160p AVC1)
('2160p', 'VP9', False, False): (False, '315'), # Fallback, 60fps
('2160p', 'VP9', False, True): (True, '337'), # Exact match, hdr
('2160p', 'VP9', True, False): (True, '315'), # Exact match, 60fps
('2160p', 'VP9', True, True): (True, '337'), # Exact match, 60fps+hdr
('4320p', 'AVC1', False, False): (False, '272'), # Fallback, 60fps, switched to VP9 (no 4320p AVC1, no other 8k streams)
('4320p', 'AVC1', False, True): (False, '272'), # Fallback, 60fps, switched to VP9 (no 4320p AVC1, no other 8k streams)
('4320p', 'AVC1', True, False): (False, '272'), # Fallback, 60fps, switched to VP9 (no 4320p AVC1, no other 8k streams)
('4320p', 'AVC1', True, True): (False, '272'), # Fallback, 60fps, switched to VP9 (no 4320p AVC1, no other 8k streams)
('4320p', 'VP9', False, False): (False, '272'), # Fallback, 60fps (no other 8k streams)
('4320p', 'VP9', False, True): (False, '272'), # Fallback, 60fps (no other 8k streams)
('4320p', 'VP9', True, False): (True, '272'), # Exact match, 60fps
('4320p', 'VP9', True, True): (False, '272'), # Fallback, 60fps (no other 8k streams)
}
for params, expected in expected_matches.items():
resolution, vcodec, prefer_60fps, prefer_hdr = params
expeceted_match_type, expected_format_code = expected
self.source.source_resolution = resolution
self.source.source_vcodec = vcodec
self.source.prefer_60fps = prefer_60fps
self.source.prefer_hdr = prefer_hdr
# The aim here is to execute the matching code to find error paths, specific testing isn't required
self.media.get_best_video_format()
self.media.get_best_audio_format()
def test_is_regex_match(self):
self.media.metadata = all_test_metadata['boring']
expected_matches = {
('.*'): (True),
('no fancy stuff'): (True),
('No fancy stuff'): (False),
('(?i)No fancy stuff'): (True), #set case insensitive flag
('no'): (True),
('Foo'): (False),
('^(?!.*fancy).*$'): (False),
('^(?!.*funny).*$'): (True),
('(?=.*f.*)(?=.{0,2}|.{4,})'): (True),
('f{4,}'): (False),
('^[^A-Z]*$'): (True),
('^[^a-z]*$'): (False),
('^[^\\s]*$'): (False)
}
for params, expected in expected_matches.items():
self.source.filter_text = params
expected_match_result = expected
self.assertEqual(self.source.is_regex_match(self.media.title), expected_match_result)
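
The expectations above (substring matches succeed, matching is case-sensitive unless an inline (?i) flag is used, and lookahead patterns behave normally) suggest Source.is_regex_match is a thin wrapper around re.search over filter_text. A plausible sketch, not necessarily the actual implementation:

import re

def is_regex_match(self, media_item_title):
    # Sketch: an empty filter matches everything; otherwise defer to re.search
    # so inline flags such as (?i) and lookaheads behave as the tests expect.
    if not self.filter_text:
        return True
    return bool(re.search(self.filter_text, media_item_title))
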
class TasksTestCase(TestCase):
def setUp(self):
# Disable general logging for test case
logging.disable(logging.CRITICAL)
def test_delete_old_media(self):
src1 = Source.objects.create(key='aaa', name='aaa', directory='/tmp/a', delete_old_media=False, days_to_keep=14)
src2 = Source.objects.create(key='bbb', name='bbb', directory='/tmp/b', delete_old_media=True, days_to_keep=14)
now = timezone.now()
m11 = Media.objects.create(source=src1, downloaded=True, key='a11', download_date=now - timedelta(days=5))
m12 = Media.objects.create(source=src1, downloaded=True, key='a12', download_date=now - timedelta(days=25))
m13 = Media.objects.create(source=src1, downloaded=False, key='a13')
m21 = Media.objects.create(source=src2, downloaded=True, key='a21', download_date=now - timedelta(days=5))
m22 = Media.objects.create(source=src2, downloaded=True, key='a22', download_date=now - timedelta(days=25))
m23 = Media.objects.create(source=src2, downloaded=False, key='a23')
self.assertEquals(src1.media_source.all().count(), 3)
self.assertEquals(src2.media_source.all().count(), 3)
cleanup_old_media()
self.assertEquals(src1.media_source.all().count(), 3)
self.assertEquals(src2.media_source.all().count(), 2)
self.assertEquals(Media.objects.filter(pk=m22.pk).exists(), False)
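
test_delete_old_media expects cleanup_old_media to leave sources with delete_old_media=False untouched and to remove only downloaded media older than days_to_keep. A hedged sketch of such a task; the real implementation in sync/tasks.py may differ, for example by also deleting files on disk:

from datetime import timedelta
from django.utils import timezone

def cleanup_old_media():
    # Sketch: delete downloaded media older than the per-source retention window.
    for source in Source.objects.filter(delete_old_media=True, days_to_keep__gt=0):
        cutoff = timezone.now() - timedelta(days=source.days_to_keep)
        for media in source.media_source.filter(downloaded=True, download_date__lt=cutoff):
            media.delete()
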

View File

@ -28,10 +28,6 @@ urlpatterns = [
ValidateSourceView.as_view(), ValidateSourceView.as_view(),
name='validate-source'), name='validate-source'),
path('source-sync-now/<uuid:pk>',
SourcesView.as_view(),
name='source-sync-now'),
path('source-add', path('source-add',
AddSourceView.as_view(), AddSourceView.as_view(),
name='add-source'), name='add-source'),

View File

@ -78,7 +78,7 @@ def resize_image_to_height(image, width, height):
if scaled_width < width: if scaled_width < width:
# Width too small, stretch it # Width too small, stretch it
scaled_width = width scaled_width = width
image = image.resize((scaled_width, height), Image.LANCZOS) image = image.resize((scaled_width, height), Image.ANTIALIAS)
if scaled_width > width: if scaled_width > width:
# Width too large, crop it # Width too large, crop it
delta = scaled_width - width delta = scaled_width - width
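
The resize change swaps Image.ANTIALIAS for Image.LANCZOS: ANTIALIAS has been a deprecated alias of LANCZOS since Pillow 9.1 and was removed in Pillow 10, so the old constant now raises AttributeError. A small compatibility sketch (not part of the project) that picks the right constant across Pillow versions:

from PIL import Image

# Prefer the Resampling enum added in Pillow 9.1, fall back on older releases.
try:
    LANCZOS = Image.Resampling.LANCZOS
except AttributeError:
    LANCZOS = Image.LANCZOS

def scale_to(image, width, height):
    # Same resampling filter as the patched resize_image_to_height() above.
    return image.resize((width, height), LANCZOS)
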

View File

@ -1,24 +1,18 @@
import glob
import os import os
import json import json
from base64 import b64decode from base64 import b64decode
import pathlib
import shutil
import sys
from django.conf import settings from django.conf import settings
from django.http import FileResponse, Http404, HttpResponseNotFound, HttpResponseRedirect from django.http import Http404
from django.views.generic import TemplateView, ListView, DetailView from django.views.generic import TemplateView, ListView, DetailView
from django.views.generic.edit import (FormView, FormMixin, CreateView, UpdateView, from django.views.generic.edit import (FormView, FormMixin, CreateView, UpdateView,
DeleteView) DeleteView)
from django.views.generic.detail import SingleObjectMixin from django.views.generic.detail import SingleObjectMixin
from django.core.exceptions import SuspiciousFileOperation
from django.http import HttpResponse from django.http import HttpResponse
from django.urls import reverse_lazy from django.urls import reverse_lazy
from django.db import IntegrityError from django.db import IntegrityError
from django.db.models import Q, Count, Sum, When, Case from django.db.models import Q, Count, Sum, When, Case
from django.forms import Form, ValidationError from django.forms import ValidationError
from django.utils.text import slugify from django.utils.text import slugify
from django.utils._os import safe_join
from django.utils import timezone from django.utils import timezone
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from common.utils import append_uri_params from common.utils import append_uri_params
@ -61,7 +55,7 @@ class DashboardView(TemplateView):
# Disk usage # Disk usage
disk_usage = Media.objects.filter( disk_usage = Media.objects.filter(
downloaded=True, downloaded_filesize__isnull=False downloaded=True, downloaded_filesize__isnull=False
).defer('metadata').aggregate(Sum('downloaded_filesize')) ).aggregate(Sum('downloaded_filesize'))
data['disk_usage_bytes'] = disk_usage['downloaded_filesize__sum'] data['disk_usage_bytes'] = disk_usage['downloaded_filesize__sum']
if not data['disk_usage_bytes']: if not data['disk_usage_bytes']:
data['disk_usage_bytes'] = 0 data['disk_usage_bytes'] = 0
@ -72,12 +66,12 @@ class DashboardView(TemplateView):
data['average_bytes_per_media'] = 0 data['average_bytes_per_media'] = 0
# Latest downloads # Latest downloads
data['latest_downloads'] = Media.objects.filter( data['latest_downloads'] = Media.objects.filter(
downloaded=True, downloaded_filesize__isnull=False downloaded=True
).defer('metadata').order_by('-download_date')[:10] ).order_by('-download_date')[:10]
# Largest downloads # Largest downloads
data['largest_downloads'] = Media.objects.filter( data['largest_downloads'] = Media.objects.filter(
downloaded=True, downloaded_filesize__isnull=False downloaded=True, downloaded_filesize__isnull=False
).defer('metadata').order_by('-downloaded_filesize')[:10] ).order_by('-downloaded_filesize')[:10]
# UID and GID # UID and GID
data['uid'] = os.getuid() data['uid'] = os.getuid()
data['gid'] = os.getgid() data['gid'] = os.getgid()
@ -98,27 +92,8 @@ class SourcesView(ListView):
paginate_by = settings.SOURCES_PER_PAGE paginate_by = settings.SOURCES_PER_PAGE
messages = { messages = {
'source-deleted': _('Your selected source has been deleted.'), 'source-deleted': _('Your selected source has been deleted.'),
'source-refreshed': _('The source has been scheduled to be synced now.')
} }
def get(self, *args, **kwargs):
if args[0].path.startswith("/source-sync-now/"):
sobj = Source.objects.get(pk=kwargs["pk"])
if sobj is None:
return HttpResponseNotFound()
verbose_name = _('Index media from source "{}" once')
index_source_task(
str(sobj.pk),
queue=str(sobj.pk),
repeat=0,
verbose_name=verbose_name.format(sobj.name))
url = reverse_lazy('sync:sources')
url = append_uri_params(url, {'message': 'source-refreshed'})
return HttpResponseRedirect(url)
else:
return super().get(self, *args, **kwargs)
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self.message = None self.message = None
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
@ -296,46 +271,28 @@ class ValidateSourceView(FormView):
class EditSourceMixin: class EditSourceMixin:
model = Source model = Source
fields = ('source_type', 'key', 'name', 'directory', 'filter_text', 'media_format', fields = ('source_type', 'key', 'name', 'directory', 'media_format',
'index_schedule', 'download_media', 'download_cap', 'delete_old_media', 'index_schedule', 'download_media', 'download_cap', 'delete_old_media',
'delete_removed_media', 'delete_files_on_disk', 'days_to_keep', 'source_resolution', 'days_to_keep', 'source_resolution', 'source_vcodec', 'source_acodec',
'source_vcodec', 'source_acodec', 'prefer_60fps', 'prefer_hdr', 'fallback', 'prefer_60fps', 'prefer_hdr', 'fallback', 'copy_thumbnails', 'write_nfo', 'write_json')
'copy_thumbnails', 'write_nfo', 'write_json', 'embed_metadata', 'embed_thumbnail',
'enable_sponsorblock', 'sponsorblock_categories', 'write_subtitles',
'auto_subtitles', 'sub_langs')
errors = { errors = {
'invalid_media_format': _('Invalid media format, the media format contains ' 'invalid_media_format': _('Invalid media format, the media format contains '
'errors or is empty. Check the table at the end of ' 'errors or is empty. Check the table at the end of '
'this page for valid media name variables'), 'this page for valid media name variables'),
'dir_outside_dlroot': _('You cannot specify a directory outside of the '
'base directory (%BASEDIR%)')
} }
def form_valid(self, form: Form): def form_valid(self, form):
# Perform extra validation to make sure the media_format is valid # Perform extra validation to make sure the media_format is valid
obj = form.save(commit=False) obj = form.save(commit=False)
source_type = form.cleaned_data['media_format'] source_type = form.cleaned_data['media_format']
example_media_file = obj.get_example_media_format() example_media_file = obj.get_example_media_format()
if example_media_file == '': if example_media_file == '':
form.add_error( form.add_error(
'media_format', 'media_format',
ValidationError(self.errors['invalid_media_format']) ValidationError(self.errors['invalid_media_format'])
) )
# Check for suspicious file path(s)
try:
targetCheck = form.cleaned_data['directory']+"/.virt"
newdir = safe_join(settings.DOWNLOAD_ROOT,targetCheck)
except SuspiciousFileOperation:
form.add_error(
'directory',
ValidationError(self.errors['dir_outside_dlroot'].replace("%BASEDIR%",str(settings.DOWNLOAD_ROOT)))
)
if form.errors: if form.errors:
return super().form_invalid(form) return super().form_invalid(form)
return super().form_valid(form) return super().form_valid(form)
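
The added directory check relies on django.utils._os.safe_join raising SuspiciousFileOperation when the joined path would escape the base directory. A brief illustration of that behaviour; the paths below are made up:

from django.core.exceptions import SuspiciousFileOperation
from django.utils._os import safe_join

safe_join('/downloads', 'channels/music/.virt')  # fine: stays under /downloads
try:
    safe_join('/downloads', '../etc/.virt')      # resolves outside the base directory
except SuspiciousFileOperation:
    pass  # the form adds the 'dir_outside_dlroot' error in this case
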
@ -406,7 +363,7 @@ class SourceView(DetailView):
error_message = get_error_message(error) error_message = get_error_message(error)
setattr(error, 'error_message', error_message) setattr(error, 'error_message', error_message)
data['errors'].append(error) data['errors'].append(error)
data['media'] = Media.objects.filter(source=self.object).order_by('-published').defer('metadata') data['media'] = Media.objects.filter(source=self.object).order_by('-published')
return data return data
@ -437,13 +394,14 @@ class DeleteSourceView(DeleteView, FormMixin):
source = self.get_object() source = self.get_object()
for media in Media.objects.filter(source=source): for media in Media.objects.filter(source=source):
if media.media_file: if media.media_file:
file_path = media.media_file.path # Delete the media file
matching_files = glob.glob(os.path.splitext(file_path)[0] + '.*') delete_file(media.media_file.path)
for file in matching_files: # Delete thumbnail copy if it exists
delete_file(file) delete_file(media.thumbpath)
directory_path = source.directory_path # Delete NFO file if it exists
if os.path.exists(directory_path): delete_file(media.nfopath)
shutil.rmtree(directory_path, True) # Delete JSON file if it exists
delete_file(media.jsonpath)
return super().post(request, *args, **kwargs) return super().post(request, *args, **kwargs)
def get_success_url(self): def get_success_url(self):
@ -490,16 +448,16 @@ class MediaView(ListView):
if self.show_skipped: if self.show_skipped:
q = Media.objects.filter(source=self.filter_source) q = Media.objects.filter(source=self.filter_source)
elif self.only_skipped: elif self.only_skipped:
q = Media.objects.filter(Q(source=self.filter_source) & (Q(skip=True) | Q(manual_skip=True))) q = Media.objects.filter(source=self.filter_source, skip=True)
else: else:
q = Media.objects.filter(Q(source=self.filter_source) & (Q(skip=False) & Q(manual_skip=False))) q = Media.objects.filter(source=self.filter_source, skip=False)
else: else:
if self.show_skipped: if self.show_skipped:
q = Media.objects.all() q = Media.objects.all()
elif self.only_skipped: elif self.only_skipped:
q = Media.objects.filter(Q(skip=True)|Q(manual_skip=True)) q = Media.objects.filter(skip=True)
else: else:
q = Media.objects.filter(Q(skip=False)&Q(manual_skip=False)) q = Media.objects.filter(skip=False)
return q.order_by('-published', '-created') return q.order_by('-published', '-created')
def get_context_data(self, *args, **kwargs): def get_context_data(self, *args, **kwargs):
@ -654,13 +612,12 @@ class MediaSkipView(FormView, SingleObjectMixin):
delete_task_by_media('sync.tasks.download_media', (str(self.object.pk),)) delete_task_by_media('sync.tasks.download_media', (str(self.object.pk),))
# If the media file exists on disk, delete it # If the media file exists on disk, delete it
if self.object.media_file_exists: if self.object.media_file_exists:
# Delete all files which contains filename delete_file(self.object.media_file.path)
filepath = self.object.media_file.path self.object.media_file = None
barefilepath, fileext = os.path.splitext(filepath) # If the media has an associated thumbnail copied, also delete it
# Get all files that start with the bare file path delete_file(self.object.thumbpath)
all_related_files = glob.glob(f'{barefilepath}.*') # If the media has an associated NFO file with it, also delete it
for file in all_related_files: delete_file(self.object.nfopath)
delete_file(file)
# Reset all download data # Reset all download data
self.object.metadata = None self.object.metadata = None
self.object.downloaded = False self.object.downloaded = False
@ -672,7 +629,6 @@ class MediaSkipView(FormView, SingleObjectMixin):
self.object.downloaded_filesize = None self.object.downloaded_filesize = None
# Mark it to be skipped # Mark it to be skipped
self.object.skip = True self.object.skip = True
self.object.manual_skip = True
self.object.save() self.object.save()
return super().form_valid(form) return super().form_valid(form)
@ -701,7 +657,6 @@ class MediaEnableView(FormView, SingleObjectMixin):
def form_valid(self, form): def form_valid(self, form):
# Mark it as not skipped # Mark it as not skipped
self.object.skip = False self.object.skip = False
self.object.manual_skip = False
self.object.save() self.object.save()
return super().form_valid(form) return super().form_valid(form)
@ -722,38 +677,11 @@ class MediaContent(DetailView):
def dispatch(self, request, *args, **kwargs): def dispatch(self, request, *args, **kwargs):
self.object = self.get_object() self.object = self.get_object()
# development direct file stream - DO NOT USE IN PRODUCTION
if settings.DEBUG and 'runserver' in sys.argv:
# get media URL
pth = self.object.media_file.url
# remove "/media-data/"
pth = pth.split("/media-data/",1)[1]
# remove "/" (in case of absolute path)
pth = pth.split(str(settings.DOWNLOAD_ROOT).lstrip("/"),1)
# if we do not have a "/" at the beginning, it is not an absolute path... headers = {
if len(pth) > 1: 'X-Accel-Redirect': self.object.media_file.url,
pth = pth[1] }
else: return HttpResponse(headers=headers)
pth = pth[0]
# build final path
filepth = pathlib.Path(str(settings.DOWNLOAD_ROOT) + pth)
if filepth.exists():
# return file
response = FileResponse(open(filepth,'rb'))
return response
else:
return HttpResponseNotFound()
else:
headers = {
'Content-Type': self.object.content_type,
'X-Accel-Redirect': self.object.media_file.url,
}
return HttpResponse(headers=headers)
class TasksView(ListView): class TasksView(ListView):

View File

@ -1,5 +1,5 @@
''' '''
Wrapper for the yt-dlp library. Used so if there are any library interface Wrapper for the youtube-dl library. Used so if there are any library interface
updates we only need to update them in one place. updates we only need to update them in one place.
''' '''
@ -64,20 +64,13 @@ def get_media_info(url):
return response return response
def download_media(url, media_format, extension, output_file, info_json, def download_media(url, media_format, extension, output_file, info_json):
sponsor_categories=None,
embed_thumbnail=False, embed_metadata=False, skip_sponsors=True,
write_subtitles=False, auto_subtitles=False, sub_langs='en'):
''' '''
Downloads a YouTube URL to a file on disk. Downloads a YouTube URL to a file on disk.
''' '''
def hook(event): def hook(event):
filename = os.path.basename(event['filename']) filename = os.path.basename(event['filename'])
if event.get('downloaded_bytes') is None or event.get('total_bytes') is None:
return None
if event['status'] == 'error': if event['status'] == 'error':
log.error(f'[youtube-dl] error occurred downloading: {filename}') log.error(f'[youtube-dl] error occurred downloading: {filename}')
elif event['status'] == 'downloading': elif event['status'] == 'downloading':
@ -106,39 +99,17 @@ def download_media(url, media_format, extension, output_file, info_json,
f'{total_size_str} in {elapsed_str}') f'{total_size_str} in {elapsed_str}')
else: else:
log.warn(f'[youtube-dl] unknown event: {str(event)}') log.warn(f'[youtube-dl] unknown event: {str(event)}')
hook.download_progress = 0 hook.download_progress = 0
ytopts = {
opts = get_yt_opts()
opts.update({
'format': media_format, 'format': media_format,
'merge_output_format': extension, 'merge_output_format': extension,
'outtmpl': output_file, 'outtmpl': output_file,
'quiet': True, 'quiet': True,
'progress_hooks': [hook], 'progress_hooks': [hook],
'writeinfojson': info_json, 'writeinfojson': info_json
'postprocessors': [], })
'writesubtitles': write_subtitles,
'writeautomaticsub': auto_subtitles,
'subtitleslangs': sub_langs.split(','),
}
if not sponsor_categories:
sponsor_categories = []
sbopt = {
'key': 'SponsorBlock',
'categories': sponsor_categories
}
ffmdopt = {
'key': 'FFmpegMetadata',
'add_chapters': embed_metadata,
'add_metadata': embed_metadata
}
opts = get_yt_opts()
if embed_thumbnail:
ytopts['postprocessors'].append({'key': 'EmbedThumbnail'})
if skip_sponsors:
ytopts['postprocessors'].append(sbopt)
ytopts['postprocessors'].append(ffmdopt)
opts.update(ytopts)
with yt_dlp.YoutubeDL(opts) as y: with yt_dlp.YoutubeDL(opts) as y:
try: try:
return y.download([url]) return y.download([url])
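
For reference, the options dict that the reworked download_media() assembles for a source with thumbnail embedding, metadata embedding, SponsorBlock and subtitles enabled could look roughly like the standalone example below. The format selector, output path and URL are placeholders, and 'writethumbnail' is added here because, when driving yt-dlp through the Python API, the EmbedThumbnail post-processor needs a downloaded thumbnail to work with:

import yt_dlp

opts = {
    'format': 'bestvideo+bestaudio',   # placeholder format selector
    'merge_output_format': 'mkv',
    'outtmpl': '/downloads/example.mkv',
    'quiet': True,
    'writeinfojson': False,
    'writethumbnail': True,            # so EmbedThumbnail has a file to embed
    'writesubtitles': True,
    'writeautomaticsub': False,
    'subtitleslangs': ['en'],
    'postprocessors': [
        {'key': 'EmbedThumbnail'},
        {'key': 'SponsorBlock', 'categories': ['sponsor', 'selfpromo']},
        {'key': 'FFmpegMetadata', 'add_chapters': True, 'add_metadata': True},
    ],
}

with yt_dlp.YoutubeDL(opts) as ydl:
    ydl.download(['https://www.youtube.com/watch?v=XXXXXXXXXXX'])
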

View File

@ -25,6 +25,9 @@ DEBUG = True if os.getenv('TUBESYNC_DEBUG', False) else False
FORCE_SCRIPT_NAME = os.getenv('DJANGO_FORCE_SCRIPT_NAME', DJANGO_URL_PREFIX) FORCE_SCRIPT_NAME = os.getenv('DJANGO_FORCE_SCRIPT_NAME', DJANGO_URL_PREFIX)
TIME_ZONE = os.getenv('TZ', 'UTC')
database_dict = {} database_dict = {}
database_connection_env = os.getenv('DATABASE_CONNECTION', '') database_connection_env = os.getenv('DATABASE_CONNECTION', '')
if database_connection_env: if database_connection_env:
@ -64,12 +67,6 @@ YOUTUBE_DL_CACHEDIR = CONFIG_BASE_DIR / 'cache'
COOKIES_FILE = CONFIG_BASE_DIR / 'cookies.txt' COOKIES_FILE = CONFIG_BASE_DIR / 'cookies.txt'
HEALTHCHECK_FIREWALL_STR = str(os.getenv('TUBESYNC_HEALTHCHECK_FIREWAL', 'True')).strip().lower()
HEALTHCHECK_FIREWALL = True if HEALTHCHECK_FIREWALL_STR == 'true' else False
HEALTHCHECK_ALLOWED_IPS_STR = str(os.getenv('TUBESYNC_HEALTHCHECK_ALLOWED_IPS', '127.0.0.1'))
HEALTHCHECK_ALLOWED_IPS = HEALTHCHECK_ALLOWED_IPS_STR.split(',')
BASICAUTH_USERNAME = os.getenv('HTTP_USER', '').strip() BASICAUTH_USERNAME = os.getenv('HTTP_USER', '').strip()
BASICAUTH_PASSWORD = os.getenv('HTTP_PASS', '').strip() BASICAUTH_PASSWORD = os.getenv('HTTP_PASS', '').strip()
if BASICAUTH_USERNAME and BASICAUTH_PASSWORD: if BASICAUTH_USERNAME and BASICAUTH_PASSWORD:

View File

@ -1,4 +1,3 @@
import os
from pathlib import Path from pathlib import Path
@ -7,7 +6,7 @@ CONFIG_BASE_DIR = BASE_DIR
DOWNLOADS_BASE_DIR = BASE_DIR DOWNLOADS_BASE_DIR = BASE_DIR
VERSION = '0.13.3' VERSION = '0.12.0'
SECRET_KEY = '' SECRET_KEY = ''
DEBUG = False DEBUG = False
ALLOWED_HOSTS = [] ALLOWED_HOSTS = []
@ -97,7 +96,7 @@ AUTH_PASSWORD_VALIDATORS = [
LANGUAGE_CODE = 'en-us' LANGUAGE_CODE = 'en-us'
TIME_ZONE = os.getenv('TZ', 'UTC') TIME_ZONE = 'UTC'
USE_I18N = True USE_I18N = True
USE_L10N = True USE_L10N = True
USE_TZ = True USE_TZ = True