Compare commits: v0.13.1...feature/wo
30 commits
| Author | SHA1 | Date |
|---|---|---|
| | 0b13065c9d | |
| | 7aa9c0ec8a | |
| | e54a762a7b | |
| | 512b70adad | |
| | 6c21ff15ab | |
| | adf26cb4e3 | |
| | 45c12561ba | |
| | 2d6f485a5d | |
| | 33b471175a | |
| | 7f4e8586b7 | |
| | bab4b9b056 | |
| | 30c2127271 | |
| | d1cb7ef76c | |
| | 1fd4f87c53 | |
| | cf06f4cbc2 | |
| | 0523f481d2 | |
| | aa4bd4ec26 | |
| | 96d9ee93ef | |
| | 8240c49d5c | |
| | 0c5e3d3818 | |
| | 22edd1bbda | |
| | fea0bb191e | |
| | 0f65a4027a | |
| | 5cac374486 | |
| | 69efc9298d | |
| | 1be8dff769 | |
| | 350e544594 | |
| | 0542c734e5 | |
| | 42b337c408 | |
| | 2f82f8c599 | |
.github/workflows/ci.yaml (vendored, 1 change)

@@ -4,6 +4,7 @@ env:
   IMAGE_NAME: tubesync
 
 on:
+  workflow_dispatch:
   push:
     branches:
       - main
.idea/.gitignore (generated, vendored; new file, 8 lines)

@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
.idea/inspectionProfiles/profiles_settings.xml (generated; new file, 6 lines)

@@ -0,0 +1,6 @@
+<component name="InspectionProjectProfileManager">
+  <settings>
+    <option name="USE_PROJECT_PROFILE" value="false" />
+    <version value="1.0" />
+  </settings>
+</component>
.idea/modules.xml (generated; new file, 8 lines)

@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="ProjectModuleManager">
+    <modules>
+      <module fileurl="file://$PROJECT_DIR$/.idea/tubesync.iml" filepath="$PROJECT_DIR$/.idea/tubesync.iml" />
+    </modules>
+  </component>
+</project>
.idea/tubesync.iml (generated; new file, 20 lines)

@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="PYTHON_MODULE" version="4">
+  <component name="NewModuleRootManager">
+    <content url="file://$MODULE_DIR$" />
+    <orderEntry type="inheritedJdk" />
+    <orderEntry type="sourceFolder" forTests="false" />
+  </component>
+  <component name="PyDocumentationSettings">
+    <option name="format" value="PLAIN" />
+    <option name="myDocStringFormat" value="Plain" />
+  </component>
+  <component name="TemplatesService">
+    <option name="TEMPLATE_CONFIGURATION" value="Jinja2" />
+    <option name="TEMPLATE_FOLDERS">
+      <list>
+        <option value="$MODULE_DIR$/tubesync/common/templates" />
+      </list>
+    </option>
+  </component>
+</module>
.idea/vcs.xml (generated; new file, 6 lines)

@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="VcsDirectoryMappings">
+    <mapping directory="" vcs="Git" />
+  </component>
+</project>
@@ -2,8 +2,8 @@ FROM debian:bookworm-slim
 
 ARG TARGETPLATFORM
 ARG S6_VERSION="3.1.5.0"
-ARG FFMPEG_DATE="autobuild-2023-09-24-14-11"
-ARG FFMPEG_VERSION="112171-g13a3e2a9b4"
+ARG FFMPEG_DATE="autobuild-2023-11-29-14-19"
+ARG FFMPEG_VERSION="112875-g47e214245b"
 
 ENV DEBIAN_FRONTEND="noninteractive" \
     HOME="/root" \
@@ -27,8 +27,8 @@ RUN export ARCH=$(case ${TARGETPLATFORM:-linux/amd64} in \
     "linux/arm64") echo "https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-aarch64.tar.xz" ;; \
     *) echo "" ;; esac) && \
   export FFMPEG_EXPECTED_SHA256=$(case ${TARGETPLATFORM:-linux/amd64} in \
-    "linux/amd64") echo "71cd08ed38c33ff2625dcca68d05efda090bdae455625d3bb1e4be4a53bf7c11" ;; \
-    "linux/arm64") echo "b6765d97f20cecef0121559ee26a2f0dfbac6aef49c48c71eb703271cb3f527b" ;; \
+    "linux/amd64") echo "36bac8c527bf390603416f749ab0dd860142b0a66f0865b67366062a9c286c8b" ;; \
+    "linux/arm64") echo "8f36e45d99d2367a5c0c220ee3164fa48f4f0cec35f78204ccced8dc303bfbdc" ;; \
     *) echo "" ;; esac) && \
  export FFMPEG_DOWNLOAD=$(case ${TARGETPLATFORM:-linux/amd64} in \
     "linux/amd64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/${FFMPEG_DATE}/ffmpeg-N-${FFMPEG_VERSION}-linux64-gpl.tar.xz" ;; \
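The Dockerfile pins an expected SHA-256 per platform before fetching the new ffmpeg build. A minimal Python sketch of the same verification, assuming the archive has already been downloaded to a local path (`ffmpeg.tar.xz` is a placeholder name, not part of the branch):

```python
import hashlib

def sha256_of(path, chunk_size=1 << 20):
    """Stream a file through SHA-256 and return the hex digest."""
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()

# Expected value for the linux/amd64 build pinned in the diff above.
expected = '36bac8c527bf390603416f749ab0dd860142b0a66f0865b67366062a9c286c8b'
# 'ffmpeg.tar.xz' is a placeholder for a locally downloaded archive.
assert sha256_of('ffmpeg.tar.xz') == expected, 'checksum mismatch'
```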
Makefile (4 changes)

@@ -29,6 +29,10 @@ runcontainer:
 	$(docker) run --rm --name $(name) --env-file dev.env --log-opt max-size=50m -ti -p 4848:4848 $(image)
 
 
 stopcontainer:
 	$(docker) stop $(name)
 
 
+test: build
+	cd tubesync && $(python) manage.py test --verbosity=2 && cd ..
+
@@ -24,7 +24,7 @@ $ docker exec -i tubesync python3 /app/manage.py dumpdata > some-file.json
 Then change you database backend over, then use
 
 ```bash
-$ cat some-file.json | docker exec -i tubesync python3 /app/manage.py loaddata --format=json -
+$ cat some-file.json | docker exec -i tubesync python3 /app/manage.py loaddata - --format=json
 ```
 
 As detailed in the Django documentation:
@@ -78,3 +78,46 @@ entry in the container or stdout logs:
 
 If you see a line similar to the above and the web interface loads, congratulations,
 you are now using an external database server for your TubeSync data!
+
+## Docker Compose
+
+If you're using Docker Compose and simply want to connect to another container with
+the DB for the performance benefits, a configuration like this would be enough:
+
+```
+tubesync-db:
+  image: postgres:15.2
+  container_name: tubesync-db
+  restart: unless-stopped
+  volumes:
+    - /<path/to>/init.sql:/docker-entrypoint-initdb.d/init.sql
+    - /<path/to>/tubesync-db:/var/lib/postgresql/data
+  environment:
+    - POSTGRES_USER=postgres
+    - POSTGRES_PASSWORD=testpassword
+
+tubesync:
+  image: ghcr.io/meeb/tubesync:latest
+  container_name: tubesync
+  restart: unless-stopped
+  ports:
+    - 4848:4848
+  volumes:
+    - /<path/to>/tubesync/config:/config
+    - /<path/to>/YouTube:/downloads
+  environment:
+    - DATABASE_CONNECTION=postgresql://postgres:testpassword@tubesync-db:5432/tubesync
+  depends_on:
+    - tubesync-db
+```
+
+Note that an `init.sql` file is needed to initialize the `tubesync`
+database before it can be written to. This file should contain:
+
+```
+CREATE DATABASE tubesync;
+```
+
+Then it must be mapped to `/docker-entrypoint-initdb.d/init.sql` for it
+to be executed on first startup of the container. See the `tubesync-db`
+volume mapping above for how to do this.
@@ -1,10 +1,14 @@
 import logging
+from django.conf import settings
 
+
+logging_level = logging.DEBUG if settings.DEBUG else logging.INFO
+
 
 log = logging.getLogger('tubesync')
-log.setLevel(logging.DEBUG)
+log.setLevel(logging_level)
 ch = logging.StreamHandler()
-ch.setLevel(logging.DEBUG)
+ch.setLevel(logging_level)
 formatter = logging.Formatter('%(asctime)s [%(name)s/%(levelname)s] %(message)s')
 ch.setFormatter(formatter)
 log.addHandler(ch)
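The logger change above switches the level from a hard-coded `DEBUG` to one derived from `settings.DEBUG`. A standalone sketch of the same setup without Django, using a plain boolean in place of `settings.DEBUG` and an example logger name:

```python
import logging

# A minimal check of the behaviour introduced above: with DEBUG off, both the
# logger and the handler sit at INFO, so debug messages are dropped.
DEBUG = False
logging_level = logging.DEBUG if DEBUG else logging.INFO

log = logging.getLogger('tubesync-example')   # example name, not the app's logger
log.setLevel(logging_level)
ch = logging.StreamHandler()
ch.setLevel(logging_level)
ch.setFormatter(logging.Formatter('%(asctime)s [%(name)s/%(levelname)s] %(message)s'))
log.addHandler(ch)

log.debug('hidden when DEBUG is False')
log.info('always shown')
```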
@@ -3,7 +3,7 @@
 <div class="col s12">
   <div class="pagination">
     {% for i in paginator.page_range %}
-      <a class="pagenum{% if i == page_obj.number %} currentpage{% endif %}" href="?{% if filter %}filter={{ filter }}&{% endif %}page={{ i }}{% if show_skipped %}&show_skipped=yes{% endif %}">{{ i }}</a>
+      <a class="pagenum{% if i == page_obj.number %} currentpage{% endif %}" href="?{% if filter %}filter={{ filter }}&{% endif %}page={{ i }}{% if show_skipped %}&show_skipped=yes{% endif %}{% if only_skipped %}&only_skipped=yes{% endif %}">{{ i }}</a>
     {% endfor %}
   </div>
 </div>
tubesync/sync/migrations/0019_add_delete_removed_media.py (new file, 17 lines)

@@ -0,0 +1,17 @@
+# Generated by pac
+
+from django.db import migrations, models
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('sync', '0018_source_subtitles'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='source',
+            name='delete_removed_media',
+            field=models.BooleanField(default=False, help_text='Delete media that is no longer on this playlist', verbose_name='delete removed media'),
+        ),
+    ]
tubesync/sync/migrations/0020_auto_20231024_1825.py (new file, 29 lines)

@@ -0,0 +1,29 @@
+# Generated by Django 3.2.22 on 2023-10-24 17:25
+
+import django.core.validators
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('sync', '0019_add_delete_removed_media'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='source',
+            name='filter_text',
+            field=models.CharField(blank=True, default='', help_text='Regex compatible filter string for video titles', max_length=100, verbose_name='filter string'),
+        ),
+        migrations.AlterField(
+            model_name='source',
+            name='auto_subtitles',
+            field=models.BooleanField(default=False, help_text='Accept auto-generated subtitles', verbose_name='accept auto-generated subs'),
+        ),
+        migrations.AlterField(
+            model_name='source',
+            name='sub_langs',
+            field=models.CharField(default='en', help_text='List of subtitles langs to download, comma-separated. Example: en,fr or all,-fr,-live_chat', max_length=30, validators=[django.core.validators.RegexValidator(message='Subtitle langs must be a comma-separated list of langs. example: en,fr or all,-fr,-live_chat', regex='^(\\-?[\\_\\.a-zA-Z]+,)*(\\-?[\\_\\.a-zA-Z]+){1}$')], verbose_name='subs langs'),
+        ),
+    ]
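The `sub_langs` field in this migration is guarded by a plain regular expression. A short sketch of what that validator accepts and rejects (the pattern is copied from the migration; the test values are illustrative):

```python
import re

# Pattern copied from the RegexValidator in migration 0020 (single-backslash form).
SUB_LANGS_RE = re.compile(r'^(\-?[\_\.a-zA-Z]+,)*(\-?[\_\.a-zA-Z]+){1}$')

for value in ('en', 'en,fr', 'all,-fr,-live_chat', 'en,', 'en fr', '123'):
    # A comma-separated list of language codes, each optionally prefixed with '-', is accepted.
    print(value, bool(SUB_LANGS_RE.match(value)))
# en True / en,fr True / all,-fr,-live_chat True / en, False / en fr False / 123 False
```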
tubesync/sync/migrations/0021_add_lightweight_metadata.py (new file, 22 lines)

@@ -0,0 +1,22 @@
+# Generated by pac
+
+from django.db import migrations, models
+from sync.models import Source
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('sync', '0020_auto_20231024_1825'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='source',
+            name='lightweight_metadata',
+            field=models.CharField(max_length=20,
+                                   default=Source.LIGHTWEIGHT_METADATA_TYPE_RAW,
+                                   choices=Source.LIGHTWEIGHT_METADATA_TYPE_CHOICES,
+                                   help_text='Lightweight metadata',
+                                   verbose_name='lightweight metadata'),
+        ),
+    ]
@@ -1,6 +1,7 @@
 import os
 import uuid
 import json
+import re
 from xml.etree import ElementTree
 from collections import OrderedDict
 from datetime import datetime, timedelta
@@ -287,6 +288,18 @@ class Source(models.Model):
         help_text=_('If "delete old media" is ticked, the number of days after which '
                     'to automatically delete media')
     )
+    filter_text = models.CharField(
+        _('filter string'),
+        max_length=100,
+        default='',
+        blank=True,
+        help_text=_('Regex compatible filter string for video titles')
+    )
+    delete_removed_media = models.BooleanField(
+        _('delete removed media'),
+        default=False,
+        help_text=_('Delete media that is no longer on this playlist')
+    )
     source_resolution = models.CharField(
         _('source resolution'),
         max_length=8,
@@ -374,6 +387,24 @@ class Source(models.Model):
         ]
     )
 
+    LIGHTWEIGHT_METADATA_TYPE_RAW = 'RAW'
+    LIGHTWEIGHT_METADATA_TYPE_UNNECESSARY = 'UNNECESSARY'
+    LIGHTWEIGHT_METADATA_TYPE_FEATHER = 'FEATHER'
+    LIGHTWEIGHT_METADATA_TYPES = (LIGHTWEIGHT_METADATA_TYPE_RAW, LIGHTWEIGHT_METADATA_TYPE_UNNECESSARY, LIGHTWEIGHT_METADATA_TYPE_FEATHER)
+    LIGHTWEIGHT_METADATA_TYPE_CHOICES = (
+        (LIGHTWEIGHT_METADATA_TYPE_RAW, _("(LARGE) Save raw metadata")),
+        (LIGHTWEIGHT_METADATA_TYPE_UNNECESSARY, _("(MEDIUM) Treeshake unnecessary metadata json keys")),
+        (LIGHTWEIGHT_METADATA_TYPE_FEATHER, _("(TINY) if the capacity is large, Treeshake it event if it is in use")),
+    )
+
+    lightweight_metadata = models.CharField(
+        _('lightweight metadata'),
+        max_length=20,
+        default=LIGHTWEIGHT_METADATA_TYPE_RAW,
+        choices=LIGHTWEIGHT_METADATA_TYPE_CHOICES,
+        help_text=_('Lightweight metadata')
+    )
+
     def __str__(self):
         return self.name
 
@@ -510,7 +541,8 @@ class Source(models.Model):
             'mm': now.strftime('%m'),
             'dd': now.strftime('%d'),
             'source': self.slugname,
-            'source_full': self.source.name,
+            'source_full': self.name,
             'uploader': 'Some Channel Name',
             'title': 'some-media-title-name',
             'title_full': 'Some Media Title Name',
             'key': 'SoMeUnIqUiD',
@@ -532,6 +564,11 @@ class Source(models.Model):
         except Exception as e:
             return ''
 
+    def is_regex_match(self, media_item_title):
+        if not self.filter_text:
+            return True
+        return bool(re.search(self.filter_text, media_item_title))
+
     def index_media(self):
         '''
             Index the media source returning a list of media metadata as dicts.
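`is_regex_match()` above treats a blank filter as "match everything" and otherwise runs a case-sensitive `re.search` over the title (an invalid pattern would raise `re.error`). A standalone sketch of that behaviour, with the method body copied out of the model:

```python
import re

def is_regex_match(filter_text, title):
    # Standalone copy of the method above: an empty filter accepts every title.
    if not filter_text:
        return True
    return bool(re.search(filter_text, title))

print(is_regex_match('', 'No fancy stuff'))           # True: blank filter skips nothing
print(is_regex_match('fancy', 'No fancy stuff'))      # True: re.search matches anywhere in the title
print(is_regex_match('Fancy', 'No fancy stuff'))      # False: matching is case-sensitive
print(is_regex_match('(?i)fAnCy', 'No fancy stuff'))  # True: an inline flag enables case-insensitive matching
```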
@@ -850,7 +887,7 @@ class Media(models.Model):
 
     def get_best_video_format(self):
         return get_best_video_format(self)
 
 
     def get_format_str(self):
         '''
             Returns a youtube-dl compatible format string for the best matches
@@ -875,7 +912,7 @@ class Media(models.Model):
             else:
                 return False
         return False
 
 
     def get_display_format(self, format_str):
         '''
             Returns a tuple used in the format component of the output filename. This
@@ -1166,7 +1203,7 @@ class Media(models.Model):
         filename = self.filename
         prefix, ext = os.path.splitext(filename)
         return f'{prefix}.nfo'
 
 
     @property
     def nfopath(self):
         return self.source.directory_path / self.nfoname
@@ -1179,7 +1216,7 @@ class Media(models.Model):
         filename = self.filename
         prefix, ext = os.path.splitext(filename)
         return f'{prefix}.info.json'
 
 
     @property
     def jsonpath(self):
         return self.source.directory_path / self.jsonname
@@ -96,14 +96,14 @@ def media_post_save(sender, instance, created, **kwargs):
     # If the media is skipped manually, bail.
     if instance.manual_skip:
         return
 
     # Triggered after media is saved
     cap_changed = False
     can_download_changed = False
     # Reset the skip flag if the download cap has changed if the media has not
     # already been downloaded
-    if not instance.downloaded:
+    if not instance.downloaded and instance.metadata:
         max_cap_age = instance.source.download_cap_date
+        filter_text = instance.source.filter_text.strip()
         published = instance.published
         if not published:
             if not instance.skip:
@@ -117,11 +117,20 @@ def media_post_save(sender, instance, created, **kwargs):
         else:
             if max_cap_age:
                 if published > max_cap_age and instance.skip:
-                    # Media was published after the cap date but is set to be skipped
-                    log.info(f'Media: {instance.source} / {instance} has a valid '
-                             f'publishing date, marking to be unskipped')
-                    instance.skip = False
-                    cap_changed = True
+                    if filter_text:
+                        if instance.source.is_regex_match(instance.title):
+                            log.info(f'Media: {instance.source} / {instance} has a valid '
+                                     f'publishing date and title filter, marking to be unskipped')
+                            instance.skip = False
+                            cap_changed = True
+                        else:
+                            log.debug(f'Media: {instance.source} / {instance} has a valid publishing date '
+                                      f'but failed the title filter match, already marked skipped')
+                    else:
+                        log.info(f'Media: {instance.source} / {instance} has a valid '
+                                 f'publishing date, marking to be unskipped')
+                        instance.skip = False
+                        cap_changed = True
                 elif published <= max_cap_age and not instance.skip:
                     log.info(f'Media: {instance.source} / {instance} is too old for '
                              f'the download cap date, marking to be skipped')
@@ -130,10 +139,20 @@ def media_post_save(sender, instance, created, **kwargs):
             else:
                 if instance.skip:
                     # Media marked to be skipped but source download cap removed
-                    log.info(f'Media: {instance.source} / {instance} has a valid '
-                             f'publishing date, marking to be unskipped')
-                    instance.skip = False
-                    cap_changed = True
+                    if filter_text:
+                        if instance.source.is_regex_match(instance.title):
+                            log.info(f'Media: {instance.source} / {instance} has a valid '
+                                     f'publishing date and title filter, marking to be unskipped')
+                            instance.skip = False
+                            cap_changed = True
+                        else:
+                            log.info(f'Media: {instance.source} / {instance} has a valid publishing date '
+                                     f'but failed the title filter match, already marked skipped')
+                    else:
+                        log.info(f'Media: {instance.source} / {instance} has a valid '
+                                 f'publishing date, marking to be unskipped')
+                        instance.skip = False
+                        cap_changed = True
                 else:
                     log.debug(f'Media: {instance.source} / {instance} has a valid publishing date and '
                               f'is already marked as not to be skipped')
 
     cap_changed = False
     # Recalculate the "can_download" flag, this may
     # need to change if the source specifications have been changed
     if instance.metadata:
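The nested branches above decide whether a not-yet-downloaded item keeps or loses its skip flag. As a reading aid only, a rough standalone summary of that decision (the function and parameter names are illustrative, not from the branch):

```python
from datetime import datetime
from typing import Optional

def should_skip(published: Optional[datetime], max_cap_age: Optional[datetime],
                filter_text: str, title_matches: bool, currently_skipped: bool) -> bool:
    """Rough summary of the unskip/skip outcome in media_post_save (a sketch, not the project's code)."""
    if published is None:
        return currently_skipped   # the no-publish-date branch is cut off in the hunk above
    if max_cap_age and published <= max_cap_age:
        return True                # older than the download cap: marked skipped
    if filter_text and not title_matches:
        return currently_skipped   # filter set but title does not match: flag left as it is
    return False                   # valid date and (no filter or filter matched): unskipped
```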
@@ -142,6 +142,15 @@ def cleanup_old_media():
             media.delete()
 
 
+def cleanup_removed_media(source, videos):
+    media_objects = Media.objects.filter(source=source, downloaded=True)
+    for item in media_objects:
+        matching_source_item = [video['id'] for video in videos if video['id'] == item.key]
+        if not matching_source_item:
+            log.info(f'{item.title} is no longer in source, removing')
+            item.delete()
+
+
 @background(schedule=0)
 def index_source_task(source_id):
     '''
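`cleanup_removed_media()` rebuilds a list of matching IDs for every downloaded item, which rescans `videos` once per item. A sketch of an equivalent check using a set, assuming the same module context as the task above (the `Media` model and `log` logger are already imported there); this is an alternative sketch, not code from the branch:

```python
def cleanup_removed_media_fast(source, videos):
    # Same behaviour as cleanup_removed_media above, but each lookup is O(1)
    # instead of rescanning the whole indexed video list per media item.
    # Assumes the surrounding tasks module: Media and log are available there.
    current_keys = {video['id'] for video in videos}
    for item in Media.objects.filter(source=source, downloaded=True):
        if item.key not in current_keys:
            log.info(f'{item.title} is no longer in source, removing')
            item.delete()
```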
@@ -186,6 +195,9 @@ def index_source_task(source_id):
     cleanup_completed_tasks()
     # Tack on a cleanup of old media
     cleanup_old_media()
+    if source.delete_removed_media:
+        log.info(f'Cleaning up media no longer in source {source}')
+        cleanup_removed_media(source, videos)
 
 
 @background(schedule=0)
@@ -219,13 +231,17 @@ def download_media_metadata(media_id):
         log.error(f'Task download_media_metadata(pk={media_id}) called but no '
                   f'media exists with ID: {media_id}')
         return
 
     if media.manual_skip:
         log.info(f'Task for ID: {media_id} skipped, due to task being manually skipped.')
         return
 
     source = media.source
     metadata = media.index_metadata()
+    if source.lightweight_metadata == Source.LIGHTWEIGHT_METADATA_TYPE_FEATHER:
+        del metadata["formats"]
+        del metadata["thumbnails"]
+        del metadata["automatic_captions"]
+        del metadata["requested_formats"]
+        del metadata["heatmap"]
     media.metadata = json.dumps(metadata, default=json_serial)
     upload_date = media.upload_date
     # Media must have a valid upload date
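The `FEATHER` branch above uses `del`, which raises `KeyError` if yt-dlp ever omits one of those keys from the metadata. A defensive sketch of the same trimming (the helper name and constant are illustrative, not from the branch):

```python
# Defensive variant of the FEATHER trimming above: dict.pop(key, None) tolerates
# metadata dicts that lack one of these keys, whereas del raises KeyError.
FEATHER_KEYS = ('formats', 'thumbnails', 'automatic_captions', 'requested_formats', 'heatmap')

def treeshake_metadata(metadata: dict) -> dict:
    for key in FEATHER_KEYS:
        metadata.pop(key, None)
    return metadata
```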
@@ -242,6 +258,11 @@ def download_media_metadata(media_id):
             log.warn(f'Media: {source} / {media} is older than cap age '
                      f'{max_cap_age}, skipping')
             media.skip = True
+    # If the source has a search filter, check the video title matches the filter
+    if source.filter_text and not source.is_regex_match(media.title):
+        # Filter text not found in the media title. Accepts regex string, blank search filter results in this returning false
+        log.warn(f'Media: {source} / {media} does not match {source.filter_text}, skipping')
+        media.skip = True
     # If the source has a cut-off check the upload date is within the allowed delta
     if source.delete_old_media and source.days_to_keep > 0:
         if not isinstance(media.published, datetime):
@@ -132,6 +132,8 @@
               <td><span class="hide-on-med-and-up">Can download?<br></span><strong>{% if media.can_download %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
             </tr>
           {% endif %}
 
+          {% if media.source.lightweight_metadata == "RAW" %}
           <tr title="The available media formats">
             <td class="hide-on-small-only">Available formats</td>
             <td><span class="hide-on-med-and-up">Available formats<br></span>
@@ -155,7 +157,10 @@
               Video: <strong>{% if video_format %}{{ video_format }} {% if video_exact %}(exact match){% else %}(fallback){% endif %}{% else %}no match{% endif %}
             </strong></td>
           </tr>
+          {% endif %}
         </table>
+        <p>{{ media.source.lightweight_metadata }}</p>
+        <p>{{ media.source }}</p>
       </div>
     </div>
   {% if media.downloaded %}
@@ -64,5 +64,5 @@
       </div>
     {% endfor %}
   </div>
-  {% include 'pagination.html' with pagination=sources.paginator filter=source.pk show_skipped=show_skipped %}
+  {% include 'pagination.html' with pagination=sources.paginator filter=source.pk show_skipped=show_skipped only_skipped=only_skipped%}
 {% endblock %}
@@ -43,6 +43,10 @@
             <td class="hide-on-small-only">Directory</td>
             <td><span class="hide-on-med-and-up">Directory<br></span><strong>{{ source.directory }}</strong></td>
           </tr>
+          <tr title="Filter text">
+            <td class="hide-on-small-only">Filter text</td>
+            <td><span class="hide-on-med-and-up">Filter text<br></span><strong>{{ source.filter_text }}</strong></td>
+          </tr>
           <tr title="Media file name format to use for saving files">
             <td class="hide-on-small-only">Media format</td>
             <td><span class="hide-on-med-and-up">Media format<br></span><strong>{{ source.media_format }}</strong></td>
@@ -115,6 +119,10 @@
             <td class="hide-on-small-only">Write JSON?</td>
             <td><span class="hide-on-med-and-up">Write JSON?<br></span><strong>{% if source.write_json %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
           </tr>
+          <tr title="Delete media that is no longer on this playlist?">
+            <td class="hide-on-small-only">Delete removed media</td>
+            <td><span class="hide-on-med-and-up">Delete removed media<br></span><strong>{% if source.delete_removed_media %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
+          </tr>
           {% if source.delete_old_media and source.days_to_keep > 0 %}
           <tr title="Days after which your media from this source will be locally deleted">
             <td class="hide-on-small-only">Delete old media</td>
@@ -178,7 +186,17 @@
             <td><span class="hide-on-med-and-up">{{ _("Subs langs?") }}:</span><strong>{{source.sub_langs}}</strong></td>
           </tr>
           {% endif %}
 
+          {% if source.lightweight_metadata %}
+          <tr title="{{ _('Are auto subs accepted?') }}">
+            <td class="hide-on-small-only">{{ _("Auto-generated subtitles?") }}:</td>
+            <td><span class="hide-on-med-and-up">{{ _("Auto-generated subtitles?") }}:</span><strong><i class="fas {% if source.auto_subtitles %}fa-check{% else %}fa-times{% endif %}"></i></strong></td>
+          </tr>
+          <tr title="{{ _('Subs langs?') }}">
+            <td class="hide-on-small-only">{{ _("Subs langs?") }}:</td>
+            <td><span class="hide-on-med-and-up">{{ _("Subs langs?") }}:</span><strong>{{source.sub_langs}}</strong></td>
+          </tr>
+          {% endif %}
         </table>
       </div>
     </div>
@@ -175,6 +175,7 @@ class FrontEndTestCase(TestCase):
             'directory': 'testdirectory',
             'media_format': settings.MEDIA_FORMATSTR_DEFAULT,
             'download_cap': 0,
+            'filter_text':'.*',
             'index_schedule': 3600,
             'delete_old_media': False,
             'days_to_keep': 14,
@@ -217,6 +218,7 @@ class FrontEndTestCase(TestCase):
             'directory': 'testdirectory',
             'media_format': settings.MEDIA_FORMATSTR_DEFAULT,
             'download_cap': 0,
+            'filter_text':'.*',
             'index_schedule': Source.IndexSchedule.EVERY_HOUR,
             'delete_old_media': False,
             'days_to_keep': 14,
@@ -247,6 +249,7 @@ class FrontEndTestCase(TestCase):
             'directory': 'testdirectory',
             'media_format': settings.MEDIA_FORMATSTR_DEFAULT,
             'download_cap': 0,
+            'filter_text':'.*',
             'index_schedule': Source.IndexSchedule.EVERY_2_HOURS, # changed
             'delete_old_media': False,
             'days_to_keep': 14,
@@ -1468,6 +1471,29 @@ class FormatMatchingTestCase(TestCase):
         self.media.get_best_video_format()
         self.media.get_best_audio_format()
 
+    def test_is_regex_match(self):
+
+        self.media.metadata = all_test_metadata['boring']
+        expected_matches = {
+            ('.*'): (True),
+            ('no fancy stuff'): (True),
+            ('No fancy stuff'): (False),
+            ('(?i)No fancy stuff'): (True), #set case insensitive flag
+            ('no'): (True),
+            ('Foo'): (False),
+            ('^(?!.*fancy).*$'): (False),
+            ('^(?!.*funny).*$'): (True),
+            ('(?=.*f.*)(?=.{0,2}|.{4,})'): (True),
+            ('f{4,}'): (False),
+            ('^[^A-Z]*$'): (True),
+            ('^[^a-z]*$'): (False),
+            ('^[^\\s]*$'): (False)
+        }
+
+        for params, expected in expected_matches.items():
+            self.source.filter_text = params
+            expected_match_result = expected
+            self.assertEqual(self.source.is_regex_match(self.media.title), expected_match_result)
+
 class TasksTestCase(TestCase):
 
     def setUp(self):
@@ -294,12 +294,13 @@ class ValidateSourceView(FormView):
 
 class EditSourceMixin:
     model = Source
-    fields = ('source_type', 'key', 'name', 'directory', 'media_format',
+    fields = ('source_type', 'key', 'name', 'directory', 'filter_text', 'media_format',
               'index_schedule', 'download_media', 'download_cap', 'delete_old_media',
-              'days_to_keep', 'source_resolution', 'source_vcodec', 'source_acodec',
-              'prefer_60fps', 'prefer_hdr', 'fallback', 'copy_thumbnails', 'write_nfo',
-              'write_json', 'embed_metadata', 'embed_thumbnail', 'enable_sponsorblock',
-              'sponsorblock_categories', 'write_subtitles', 'auto_subtitles', 'sub_langs')
+              'delete_removed_media', 'days_to_keep', 'source_resolution', 'source_vcodec',
+              'source_acodec', 'prefer_60fps', 'prefer_hdr', 'fallback', 'copy_thumbnails',
+              'write_nfo', 'write_json', 'embed_metadata', 'embed_thumbnail',
+              'enable_sponsorblock', 'sponsorblock_categories', 'write_subtitles',
+              'auto_subtitles', 'sub_langs', 'lightweight_metadata')
     errors = {
         'invalid_media_format': _('Invalid media format, the media format contains '
                                   'errors or is empty. Check the table at the end of '
@@ -6,7 +6,7 @@ CONFIG_BASE_DIR = BASE_DIR
 DOWNLOADS_BASE_DIR = BASE_DIR
 
 
-VERSION = '0.13.1'
+VERSION = '0.13.3'
 SECRET_KEY = ''
 DEBUG = False
 ALLOWED_HOSTS = []