Compare commits
15 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 799c0fce39 | |
| | 2f324f28a9 | |
| | 895bfe6f87 | |
| | e0669b107d | |
| | 0dc201b293 | |
| | 82fa0f6bce | |
| | 8b93cb4a59 | |
| | 647254d7f7 | |
| | 3567e20600 | |
| | 5348e25303 | |
| | 749df3f7bb | |
| | 2c2f53e5b2 | |
| | 06cfafb803 | |
| | f5a37f2e86 | |
| | 36747a47e0 | |
.github/FUNDING.yml (vendored, new file, +1 line)

@@ -0,0 +1 @@
github: [meeb]
Pipfile.lock (generated, 24 lines changed)

@@ -39,11 +39,11 @@
         },
         "django": {
             "hashes": [
-                "sha256:2d78425ba74c7a1a74b196058b261b9733a8570782f4e2828974777ccca7edf7",
-                "sha256:efa2ab96b33b20c2182db93147a0c3cd7769d418926f9e9f140a60dca7c64ca9"
+                "sha256:169e2e7b4839a7910b393eec127fd7cbae62e80fa55f89c6510426abf673fe5f",
+                "sha256:c6c0462b8b361f8691171af1fb87eceb4442da28477e12200c40420176206ba7"
             ],
             "index": "pypi",
-            "version": "==3.1.5"
+            "version": "==3.1.6"
         },
         "django-appconf": {
             "hashes": [
@@ -172,10 +172,10 @@
         },
         "pytz": {
             "hashes": [
-                "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4",
-                "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5"
+                "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da",
+                "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"
             ],
-            "version": "==2020.5"
+            "version": "==2021.1"
         },
         "rcssmin": {
             "hashes": [
@@ -225,10 +225,10 @@
         },
         "urllib3": {
             "hashes": [
-                "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08",
-                "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"
+                "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80",
+                "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73"
             ],
-            "version": "==1.26.2"
+            "version": "==1.26.3"
         },
         "whitenoise": {
             "hashes": [
@@ -240,11 +240,11 @@
         },
         "youtube-dl": {
             "hashes": [
-                "sha256:8f421ca8394d2529e06225e44ec66538d2a28f6f340c03065776894bf3d24ea6",
-                "sha256:acf74701a31b6c3d06f9d4245a46ba8fb6c378931681177412043c6e8276fee7"
+                "sha256:b390cddbd4d605bd887d0d4063988cef0fa13f916d2e1e3564badbb22504d754",
+                "sha256:e7d48cd42f3081e1e0064e69f31f2856508ef31c0fc80eeebd8e70c6a031a24d"
             ],
             "index": "pypi",
-            "version": "==2021.1.16"
+            "version": "==2021.2.10"
         }
     },
     "develop": {}
README.md (18 lines changed)

@@ -22,7 +22,7 @@ hopefully, quite reliable.
 # Latest container image
 
 ```yaml
-ghcr.io/meeb/tubesync:v0.8
+ghcr.io/meeb/tubesync:v0.9
 ```
 
 **NOTE: the `:latest` tag does exist, but will contain in-development commits and may
@@ -102,7 +102,7 @@ Finally, download and run the container:
 
 ```bash
 # Pull a versioned image
-$ docker pull ghcr.io/meeb/tubesync:v0.8
+$ docker pull ghcr.io/meeb/tubesync:v0.9
 # Start the container using your user ID and group ID
 $ docker run \
   -d \
@@ -113,7 +113,7 @@ $ docker run \
   -v /some/directory/tubesync-config:/config \
   -v /some/directory/tubesync-downloads:/downloads \
   -p 4848:4848 \
-  ghcr.io/meeb/tubesync:v0.8
+  ghcr.io/meeb/tubesync:v0.9
 ```
 
 Once running, open `http://localhost:4848` in your browser and you should see the
@@ -125,7 +125,7 @@ Alternatively, for Docker Compose, you can use something like:
 
 ```yaml
 tubesync:
-  image: ghcr.io/meeb/tubesync:v0.8
+  image: ghcr.io/meeb/tubesync:v0.9
   container_name: tubesync
   restart: unless-stopped
   ports:
@@ -196,6 +196,16 @@ $ docker logs --follow tubesync
 ```
 
 
+# Advanced usage guides
+
+Once you're happy using TubeSync there are some advanced usage guides for more complex
+and less common features:
+
+
+
+
+
 # Warnings
 
 ### 1. Index frequency
@@ -19,8 +19,8 @@ chown -R app:app /app/common/static && \
 chmod -R 0750 /app/common/static && \
 chown -R app:app /app/static && \
 chmod -R 0750 /app/static && \
-find /app -type f -exec chmod 640 {} \; && \
-chmod +x /app/healthcheck.py
+find /app -type f ! -iname healthcheck.py -exec chmod 640 {} \; && \
+chmod 0755 /app/healthcheck.py
 
 # Run migrations
 exec s6-setuidgid app \
docs/create-missing-metadata.md (new file, +37 lines)

@@ -0,0 +1,37 @@
# TubeSync

## Advanced usage guide - creating missing metadata

This is a new feature in v0.9 of TubeSync and later. It allows you to create or
re-create missing metadata in your TubeSync download directories for missing `nfo`
files and thumbnails.

If you add a source with "write NFO files" or "copy thumbnails" disabled, download
some media and then update the source to write NFO files or copy thumbnails,
TubeSync will not automatically go back and copy or create the missing metadata
files. You can use a special one-off command to manually write missing metadata
files to the correct locations.

## Requirements

You have added a source without metadata writing enabled, downloaded some media, then
updated the source to enable metadata writing.

## Steps

### 1. Run the batch metadata sync command

Execute the following Django command:

`./manage.py sync-missing-metadata`

When deploying TubeSync inside a container, you can execute this with:

`docker exec -ti tubesync python3 /app/manage.py sync-missing-metadata`

This command will log what it's doing to the terminal when you run it.

Internally, this command loops over all your sources which have been saved with
"write NFO files" or "copy thumbnails" enabled. It then loops over all media saved to
each source and confirms that the appropriate thumbnail file has been copied over and
the NFO file has been written, if enabled.
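If you already have a shell inside the container, the same batch sync can also be invoked from Python via Django's management API. This is a minimal sketch and not part of the documentation above; `call_command` is standard Django, and it assumes TubeSync's Django settings are loaded (for example from `python3 /app/manage.py shell`):

```python
# Minimal sketch: invoking the documented batch sync from a Django shell instead of
# the CLI. Assumes TubeSync's Django environment is already configured (e.g. inside
# `python3 /app/manage.py shell` in the tubesync container).
from django.core.management import call_command

# Equivalent to `./manage.py sync-missing-metadata`; progress is logged as usual.
call_command('sync-missing-metadata')
```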
docs/import-existing-media.md (new file, +81 lines)

@@ -0,0 +1,81 @@
# TubeSync

## Advanced usage guide - importing existing media

This is a new feature in v0.9 of TubeSync and later. It allows you to mark existing
downloaded media as "downloaded" in TubeSync. You can use this feature if, for example,
you already have an extensive catalogue of downloaded media which you want to mark
as downloaded in TubeSync so TubeSync doesn't re-download media you already have.

## Requirements

Your existing downloaded media MUST contain the unique ID. For YouTube videos, this
means the YouTube video ID MUST be in the filename.

Supported extensions to be imported are .m4a, .ogg, .mkv, .mp3, .mp4 and .avi. The
media you want to import must end in one of these file extensions.

## Caveats

As TubeSync does not probe media, and your existing media may be re-encoded or in
different formats to what is available in the current media metadata, there is no way
for TubeSync to know what codecs, resolution, bitrate etc. your imported media is in.
Any manually imported existing local media will display blank boxes for this
information in the TubeSync interface as it's unavailable.

## Steps

### 1. Add your source to TubeSync

Add your source to TubeSync, such as a YouTube channel. **Make sure you untick the
"download media" checkbox.**

This will allow TubeSync to index all the available media on your source, but won't
start downloading any media.

### 2. Wait

Wait for all the media on your source to be indexed. This may take some time.

### 3. Move your existing media into TubeSync

You now need to move your existing media files into the download directories created
by TubeSync. For example, if you have downloaded videos for a YouTube channel
"TestChannel", you would have added this as a source called TestChannel, in a
directory called test-channel, in TubeSync. It would have a download directory created
on disk at:

`/path/to/downloads/test-channel`

You would move all of the videos you downloaded outside of TubeSync for this channel
into this directory.

In short, your existing media needs to be moved into the correct TubeSync source
directory to be detected.

This is required so TubeSync can know which Source to link the media to.

### 4. Run the batch import command

Execute the following Django command:

`./manage.py import-existing-media`

When deploying TubeSync inside a container, you can execute this with:

`docker exec -ti tubesync python3 /app/manage.py import-existing-media`

This command will log what it's doing to the terminal when you run it.

Internally, `import-existing-media` looks for the unique media key (for YouTube, this
is the YouTube video ID) in the filename and detects the source to link it to based
on the directory the media file is inside.

### 5. Re-enable downloading at the source

Edit your source and re-enable / tick the "download media" option. This will allow
TubeSync to download any missing media you did not manually import.

Note that TubeSync will still copy thumbnails, write `nfo` files, etc. for media you
manually import, if enabled at the source level.
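The key-in-filename rule described in step 4 can be illustrated with a short, self-contained sketch; the paths and the video ID below are hypothetical, and the real matching is done by the `import-existing-media` command included later in this diff:

```python
# Simplified illustration of the matching rule described above: an indexed media
# item is linked to an on-disk file when its unique key (here, a made-up YouTube
# video ID) appears anywhere in the file path. All names below are hypothetical.
media_key = 'dQw4w9WgXcQ'
files_on_disk = [
    '/downloads/test-channel/20210214_TestChannel_Some_Video_dQw4w9WgXcQ_720p.mkv',
    '/downloads/test-channel/unrelated-clip.mp4',
]

matches = [path for path in files_on_disk if media_key in path]
print(matches)  # only the first file matches and would be marked as downloaded
```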
@@ -19,7 +19,7 @@ def append_uri_params(uri, params):
 def clean_filename(filename):
     if not isinstance(filename, str):
         raise ValueError(f'filename must be a str, got {type(filename)}')
-    to_scrub = '<>\/:*?"|'
+    to_scrub = '<>\/:*?"|%'
     for char in to_scrub:
         filename = filename.replace(char, '')
     filename = ''.join([c for c in filename if ord(c) > 30])
tubesync/healthcheck.py (mode changed: Normal file → Executable file, 0 lines changed)
@@ -7,7 +7,7 @@ class SourceAdmin(admin.ModelAdmin):
 
     ordering = ('-created',)
     list_display = ('uuid', 'name', 'source_type', 'last_crawl',
-                    'has_failed')
+                    'download_media', 'has_failed')
     readonly_fields = ('uuid', 'created')
     search_fields = ('uuid', 'key', 'name')
 
tubesync/sync/management/commands/import-existing-media.py (new file, +55 lines)

@@ -0,0 +1,55 @@
import os
from pathlib import Path
from django.core.management.base import BaseCommand, CommandError
from common.logger import log
from sync.models import Source, Media


class Command(BaseCommand):

    help = ('Scans download media directories for media not yet downloaded and ',
            'marks them as downloaded')
    extra_extensions = ['mp3', 'mp4', 'avi']

    def handle(self, *args, **options):
        log.info('Building directory to Source map...')
        dirmap = {}
        for s in Source.objects.all():
            dirmap[s.directory_path] = s
        log.info(f'Scanning sources...')
        file_extensions = list(Source.EXTENSIONS) + self.extra_extensions
        for sourceroot, source in dirmap.items():
            media = list(Media.objects.filter(source=source, downloaded=False,
                                              skip=False))
            if not media:
                log.info(f'Source "{source}" has no missing media')
                continue
            log.info(f'Scanning Source "{source}" directory for media to '
                     f'import: {sourceroot}, looking for {len(media)} '
                     f'undownloaded and unskipped items')
            on_disk = []
            for (root, dirs, files) in os.walk(sourceroot):
                rootpath = Path(root)
                for filename in files:
                    filepart, ext = os.path.splitext(filename)
                    if ext.startswith('.'):
                        ext = ext[1:]
                    ext = ext.strip().lower()
                    if ext not in file_extensions:
                        continue
                    on_disk.append(str(rootpath / filename))
            filemap = {}
            for item in media:
                for filepath in on_disk:
                    if item.key in filepath:
                        # The unique item key is in the file name on disk, map it to
                        # the undownloaded media item
                        filemap[filepath] = item
                        continue
            for filepath, item in filemap.items():
                log.info(f'Matched on-disk file: {filepath} '
                         f'to media item: {item.source} / {item}')
                item.media_file.name = filepath
                item.downloaded = True
                item.save()
        log.info('Done')
tubesync/sync/management/commands/sync-missing-metadata.py (new file, +34 lines)

@@ -0,0 +1,34 @@
import os
from shutil import copyfile
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
from common.logger import log
from sync.models import Source, Media
from sync.utils import write_text_file


class Command(BaseCommand):

    help = 'Syncs missing metadata (such as nfo files) if source settings are updated'

    def handle(self, *args, **options):
        log.info('Syncing missing metadata...')
        sources = Source.objects.filter(Q(copy_thumbnails=True) | Q(write_nfo=True))
        for source in sources.order_by('name'):
            log.info(f'Finding media for source: {source}')
            for item in Media.objects.filter(source=source, downloaded=True):
                log.info(f'Checking media for missing metadata: {source} / {item}')
                thumbpath = item.thumbpath
                if not thumbpath.is_file():
                    if item.thumb:
                        log.info(f'Copying missing thumbnail from: {item.thumb.path} '
                                 f'to: {thumbpath}')
                        copyfile(item.thumb.path, thumbpath)
                    else:
                        log.error(f'Tried to copy missing thumbnail for {item} but '
                                  f'the thumbnail has not been downloaded')
                nfopath = item.nfopath
                if not nfopath.is_file():
                    log.info(f'Writing missing NFO file: {nfopath}')
                    write_text_file(nfopath, item.nfoxml)
        log.info('Done')
tubesync/sync/migrations/0009_auto_20210218_0442.py (new file, +30 lines)

@@ -0,0 +1,30 @@
# Generated by Django 3.1.6 on 2021-02-18 04:42

import django.core.files.storage
from django.db import migrations, models
import sync.models


class Migration(migrations.Migration):

    dependencies = [
        ('sync', '0008_source_download_cap'),
    ]

    operations = [
        migrations.AddField(
            model_name='source',
            name='download_media',
            field=models.BooleanField(default=True, help_text='Download media from this source, if not selected the source will only be indexed', verbose_name='download media'),
        ),
        migrations.AlterField(
            model_name='media',
            name='media_file',
            field=models.FileField(blank=True, help_text='Media file', max_length=200, null=True, storage=django.core.files.storage.FileSystemStorage(location='/home/meeb/Repos/github.com/meeb/tubesync/tubesync/downloads'), upload_to=sync.models.get_media_file_path, verbose_name='media file'),
        ),
        migrations.AlterField(
            model_name='source',
            name='media_format',
            field=models.CharField(default='{yyyymmdd}_{source}_{title}_{key}_{format}.{ext}', help_text='File format to use for saving files, detailed options at bottom of page.', max_length=200, verbose_name='media format'),
        ),
    ]
@@ -101,6 +101,11 @@ class Source(models.Model):
         (FALLBACK_NEXT_BEST_HD, _('Get next best resolution but at least HD'))
     )
 
+    EXTENSION_M4A = 'm4a'
+    EXTENSION_OGG = 'ogg'
+    EXTENSION_MKV = 'mkv'
+    EXTENSIONS = (EXTENSION_M4A, EXTENSION_OGG, EXTENSION_MKV)
+
     # Fontawesome icons used for the source on the front end
     ICONS = {
         SOURCE_TYPE_YOUTUBE_CHANNEL: '<i class="fab fa-youtube"></i>',
@@ -113,6 +118,12 @@
         SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'https://www.youtube.com/channel/{key}',
         SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/playlist?list={key}',
     }
+    # Format used to create indexable URLs
+    INDEX_URLS = {
+        SOURCE_TYPE_YOUTUBE_CHANNEL: 'https://www.youtube.com/c/{key}/videos',
+        SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'https://www.youtube.com/channel/{key}/videos',
+        SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/playlist?list={key}',
+    }
     # Callback functions to get a list of media from the source
     INDEXERS = {
         SOURCE_TYPE_YOUTUBE_CHANNEL: get_youtube_media_info,
@@ -210,6 +221,11 @@
         default=IndexSchedule.EVERY_6_HOURS,
         help_text=_('Schedule of how often to index the source for new media')
     )
+    download_media = models.BooleanField(
+        _('download media'),
+        default=True,
+        help_text=_('Download media from this source, if not selected the source will only be indexed')
+    )
     download_cap = models.IntegerField(
         _('download cap'),
         choices=CapChoices.choices,
@@ -328,23 +344,32 @@ class Source(models.Model):
         '''
         if self.is_audio:
             if self.source_acodec == self.SOURCE_ACODEC_MP4A:
-                return 'm4a'
+                return self.EXTENSION_M4A
             elif self.source_acodec == self.SOURCE_ACODEC_OPUS:
-                return 'ogg'
+                return self.EXTENSION_OGG
             else:
                 raise ValueError('Unable to choose audio extension, uknown acodec')
         else:
-            return 'mkv'
+            return self.EXTENSION_MKV
 
     @classmethod
     def create_url(obj, source_type, key):
         url = obj.URLS.get(source_type)
         return url.format(key=key)
 
+    @classmethod
+    def create_index_url(obj, source_type, key):
+        url = obj.INDEX_URLS.get(source_type)
+        return url.format(key=key)
+
     @property
     def url(self):
         return Source.create_url(self.source_type, self.key)
 
+    @property
+    def index_url(self):
+        return Source.create_index_url(self.source_type, self.key)
+
     @property
     def format_summary(self):
         if self.source_resolution == Source.SOURCE_RESOLUTION_AUDIO:
@@ -437,25 +462,10 @@
         indexer = self.INDEXERS.get(self.source_type, None)
         if not callable(indexer):
             raise Exception(f'Source type f"{self.source_type}" has no indexer')
-        response = indexer(self.url)
-
-        # Account for nested playlists, such as a channel of playlists of playlists
-        def _recurse_playlists(playlist):
-            videos = []
-            if not playlist:
-                return videos
-            entries = playlist.get('entries', [])
-            for entry in entries:
-                if not entry:
-                    continue
-                subentries = entry.get('entries', [])
-                if subentries:
-                    videos = videos + _recurse_playlists(entry)
-                else:
-                    videos.append(entry)
-            return videos
-
-        return _recurse_playlists(response)
+        response = indexer(self.index_url)
+        if not isinstance(response, dict):
+            return []
+        return response.get('entries', [])
 
 
 def get_media_thumb_path(instance, filename):
@@ -481,6 +491,12 @@ class Media(models.Model):
         Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: 'https://www.youtube.com/watch?v={key}',
         Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: 'https://www.youtube.com/watch?v={key}',
     }
+    # Callback functions to get a list of media from the source
+    INDEXERS = {
+        Source.SOURCE_TYPE_YOUTUBE_CHANNEL: get_youtube_media_info,
+        Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: get_youtube_media_info,
+        Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: get_youtube_media_info,
+    }
     # Maps standardised names to names used in source metdata
     METADATA_FIELDS = {
         'upload_date': {
@@ -558,14 +574,18 @@
     STATE_SCHEDULED = 'scheduled'
     STATE_DOWNLOADING = 'downloading'
     STATE_DOWNLOADED = 'downloaded'
+    STATE_SKIPPED = 'skipped'
+    STATE_DISABLED_AT_SOURCE = 'source-disabled'
     STATE_ERROR = 'error'
     STATES = (STATE_UNKNOWN, STATE_SCHEDULED, STATE_DOWNLOADING, STATE_DOWNLOADED,
-              STATE_ERROR)
+              STATE_SKIPPED, STATE_DISABLED_AT_SOURCE, STATE_ERROR)
     STATE_ICONS = {
         STATE_UNKNOWN: '<i class="far fa-question-circle" title="Unknown download state"></i>',
         STATE_SCHEDULED: '<i class="far fa-clock" title="Scheduled to download"></i>',
         STATE_DOWNLOADING: '<i class="fas fa-download" title="Downloading now"></i>',
         STATE_DOWNLOADED: '<i class="far fa-check-circle" title="Downloaded"></i>',
+        STATE_SKIPPED: '<i class="fas fa-exclamation-circle" title="Skipped"></i>',
+        STATE_DISABLED_AT_SOURCE: '<i class="fas fa-stop-circle" title="Media downloading disabled at source"></i>',
         STATE_ERROR: '<i class="fas fa-exclamation-triangle" title="Error downloading"></i>',
     }
 
@@ -904,6 +924,10 @@
             'hdr': display_format['hdr'],
         }
 
+    @property
+    def has_metadata(self):
+        return self.metadata is not None
+
     @property
     def loaded_metadata(self):
         try:
@@ -976,8 +1000,12 @@
     def votes(self):
         field = self.get_metadata_field('upvotes')
         upvotes = self.loaded_metadata.get(field, 0)
+        if not isinstance(upvotes, int):
+            upvotes = 0
         field = self.get_metadata_field('downvotes')
         downvotes = self.loaded_metadata.get(field, 0)
+        if not isinstance(downvotes, int):
+            downvotes = 0
         return upvotes + downvotes
 
     @property
@@ -1163,6 +1191,10 @@
                 return self.STATE_ERROR
             else:
                 return self.STATE_SCHEDULED
+        if self.skip:
+            return self.STATE_SKIPPED
+        if not self.source.download_media:
+            return self.STATE_DISABLED_AT_SOURCE
         return self.STATE_UNKNOWN
 
     def get_download_state_icon(self, task=None):
@@ -1180,6 +1212,16 @@
         # Return the download paramaters
         return format_str, self.source.extension
 
+    def index_metadata(self):
+        '''
+            Index the media metadata returning a dict of info.
+        '''
+        indexer = self.INDEXERS.get(self.source.source_type, None)
+        if not callable(indexer):
+            raise Exception(f'Meida with source type f"{self.source.source_type}" '
+                            f'has no indexer')
+        return indexer(self.url)
+
 
 class MediaServer(models.Model):
     '''
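The new `INDEX_URLS` map and `create_index_url()` classmethod above build the URL that the flattened `index_media()` now crawls for a source. A small usage sketch follows; the channel name is made up and a configured TubeSync Django environment is assumed:

```python
# Hypothetical usage of the Source.create_index_url() classmethod added above.
# The channel key is invented; a configured Django environment is assumed.
from sync.models import Source

url = Source.create_index_url(Source.SOURCE_TYPE_YOUTUBE_CHANNEL, 'ExampleChannel')
print(url)  # https://www.youtube.com/c/ExampleChannel/videos
```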
@@ -8,8 +8,9 @@ from background_task.models import Task
 from common.logger import log
 from .models import Source, Media, MediaServer
 from .tasks import (delete_task_by_source, delete_task_by_media, index_source_task,
-                    download_media_thumbnail, map_task_to_instance,
-                    check_source_directory_exists, download_media, rescan_media_server)
+                    download_media_thumbnail, download_media_metadata,
+                    map_task_to_instance, check_source_directory_exists,
+                    download_media, rescan_media_server)
 from .utils import delete_file
 
 
@@ -93,16 +94,27 @@ def task_task_failed(sender, task_id, completed_task, **kwargs):
 def media_post_save(sender, instance, created, **kwargs):
     # Triggered after media is saved, Recalculate the "can_download" flag, this may
     # need to change if the source specifications have been changed
-    post_save.disconnect(media_post_save, sender=Media)
-    if instance.get_format_str():
-        if not instance.can_download:
-            instance.can_download = True
-            instance.save()
-    else:
-        if instance.can_download:
-            instance.can_download = False
-            instance.save()
-    post_save.connect(media_post_save, sender=Media)
+    if instance.metadata:
+        post_save.disconnect(media_post_save, sender=Media)
+        if instance.get_format_str():
+            if not instance.can_download:
+                instance.can_download = True
+                instance.save()
+        else:
+            if instance.can_download:
+                instance.can_download = False
+                instance.save()
+        post_save.connect(media_post_save, sender=Media)
+    # If the media is missing metadata schedule it to be downloaded
+    if not instance.metadata:
+        log.info(f'Scheduling task to download metadata for: {instance.url}')
+        verbose_name = _('Downloading metadata for "{}"')
+        download_media_metadata(
+            str(instance.pk),
+            priority=10,
+            verbose_name=verbose_name.format(instance.pk),
+            remove_existing_tasks=True
+        )
     # If the media is missing a thumbnail schedule it to be downloaded
     if not instance.thumb_file_exists:
         instance.thumb = None
@@ -124,7 +136,8 @@ def media_post_save(sender, instance, created, **kwargs):
     if not instance.media_file_exists:
         instance.downloaded = False
         instance.media_file = None
-    if not instance.downloaded and instance.can_download and not instance.skip:
+    if (not instance.downloaded and instance.can_download and not instance.skip
+            and instance.source.download_media):
         delete_task_by_media('sync.tasks.download_media', (str(instance.pk),))
         verbose_name = _('Downloading media for "{}"')
         download_media(
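The reworked `media_post_save` handler above keeps the existing disconnect/save/reconnect guard so that calling `save()` inside the handler does not re-trigger it. A generic, standalone sketch of that pattern follows; the `Article` model and `reviewed` field are made up, and the `try/finally` is an extra safety net not present in the TubeSync handler:

```python
# Generic illustration of the disconnect/save/reconnect guard used by
# media_post_save above. "Article" and "reviewed" are made-up names; connect the
# handler with post_save.connect(article_post_save, sender=Article) at app start-up.
from django.db.models.signals import post_save


def article_post_save(sender, instance, created, **kwargs):
    # Disconnect first so the save() below does not recurse back into this handler
    post_save.disconnect(article_post_save, sender=sender)
    try:
        if not instance.reviewed:
            instance.reviewed = True
            instance.save()
    finally:
        # Always reconnect, even if save() raises
        post_save.connect(article_post_save, sender=sender)
```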
@@ -179,30 +179,6 @@ def index_source_task(source_id):
         except Media.DoesNotExist:
             media = Media(key=key)
         media.source = source
-        media.metadata = json.dumps(video)
-        upload_date = media.upload_date
-        # Media must have a valid upload date
-        if upload_date:
-            media.published = timezone.make_aware(upload_date)
-        else:
-            log.error(f'Media has no upload date, skipping: {source} / {media}')
-            continue
-        # If the source has a download cap date check the upload date is allowed
-        max_cap_age = source.download_cap_date
-        if max_cap_age:
-            if media.published < max_cap_age:
-                # Media was published after the cap date, skip it
-                log.warn(f'Media: {source} / {media} is older than cap age '
-                         f'{max_cap_age}, skipping')
-                continue
-        # If the source has a cut-off check the upload date is within the allowed delta
-        if source.delete_old_media and source.days_to_keep > 0:
-            delta = timezone.now() - timedelta(days=source.days_to_keep)
-            if media.published < delta:
-                # Media was published after the cutoff date, skip it
-                log.warn(f'Media: {source} / {media} is older than '
-                         f'{source.days_to_keep} days, skipping')
-                continue
         try:
             media.save()
             log.info(f'Indexed media: {source} / {media}')
@@ -234,6 +210,56 @@ def check_source_directory_exists(source_id):
         source.make_directory()
 
 
+@background(schedule=0)
+def download_media_metadata(media_id):
+    '''
+        Downloads the metadata for a media item.
+    '''
+    try:
+        media = Media.objects.get(pk=media_id)
+    except Media.DoesNotExist:
+        # Task triggered but the media no longer exists, do nothing
+        log.error(f'Task download_media_metadata(pk={media_id}) called but no '
+                  f'media exists with ID: {media_id}')
+        return
+    source = media.source
+    metadata = media.index_metadata()
+    media.metadata = json.dumps(metadata)
+    upload_date = media.upload_date
+    # Media must have a valid upload date
+    if upload_date:
+        media.published = timezone.make_aware(upload_date)
+    else:
+        log.error(f'Media has no upload date, skipping: {source} / {media}')
+        media.skip = True
+    # If the source has a download cap date check the upload date is allowed
+    max_cap_age = source.download_cap_date
+    if max_cap_age:
+        if media.published < max_cap_age:
+            # Media was published after the cap date, skip it
+            log.warn(f'Media: {source} / {media} is older than cap age '
+                     f'{max_cap_age}, skipping')
+            media.skip = True
+    # If the source has a cut-off check the upload date is within the allowed delta
+    if source.delete_old_media and source.days_to_keep > 0:
+        delta = timezone.now() - timedelta(days=source.days_to_keep)
+        if media.published < delta:
+            # Media was published after the cutoff date, skip it
+            log.warn(f'Media: {source} / {media} is older than '
+                     f'{source.days_to_keep} days, skipping')
+            media.skip = True
+    # Check we can download the media item
+    if not media.skip:
+        if media.get_format_str():
+            media.can_download = True
+        else:
+            media.can_download = False
+    # Save the media
+    media.save()
+    log.info(f'Saved {len(media.metadata)} bytes of metadata for: '
+             f'{source} / {media_id}')
+
+
 @background(schedule=0)
 def download_media_thumbnail(media_id, url):
     '''
@@ -282,6 +308,17 @@ def download_media(media_id):
         log.warn(f'Download task triggeredd media: {media} (UUID: {media.pk}) but it '
                  f'is now marked to be skipped, not downloading')
         return
+    if media.downloaded and media.media_file:
+        # Media has been marked as downloaded before the download_media task was fired,
+        # skip it
+        log.warn(f'Download task triggeredd media: {media} (UUID: {media.pk}) but it '
+                 f'has already been marked as downloaded, not downloading again')
+        return
+    if not media.source.download_media:
+        log.warn(f'Download task triggeredd media: {media} (UUID: {media.pk}) but the '
+                 f'source {media.source} has since been marked to not download media, '
+                 f'not downloading')
+        return
     filepath = media.filepath
     log.info(f'Downloading media: {media} (UUID: {media.pk}) to: "{filepath}"')
     format_str, container = media.download_media()
@@ -64,8 +64,14 @@
 <td class="hide-on-small-only">Fallback</td>
 <td><span class="hide-on-med-and-up">Fallback<br></span><strong>{{ media.source.get_fallback_display }}</strong></td>
 </tr>
+{% if not media.source.download_media %}
+<tr title="Is media marked to be downloaded at the source?">
+<td class="hide-on-small-only">Source download?</td>
+<td><span class="hide-on-med-and-up">Source download?<br></span><strong>{% if media.source.download_media %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
+</tr>
+{% endif %}
 {% if media.skip %}
-<tr title="Has the media been downloaded?">
+<tr title="Is the media marked to be skipped?">
 <td class="hide-on-small-only">Skipping?</td>
 <td><span class="hide-on-med-and-up">Skipping?<br></span><strong>{% if media.skip %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
 </tr>
@@ -109,7 +115,7 @@
 {% else %}
 <tr title="Can the media be downloaded?">
 <td class="hide-on-small-only">Can download?</td>
-<td><span class="hide-on-med-and-up">Can download?<br></span><strong>{% if youtube_dl_format %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
+<td><span class="hide-on-med-and-up">Can download?<br></span><strong>{% if media.can_download %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
 </tr>
 {% endif %}
 <tr title="The available media formats">
@@ -24,8 +24,12 @@
 {% else %}
 {% if m.skip %}
 <span class="error-text"><i class="fas fa-times" title="Skipping media"></i> Skipped</span>
+{% elif not m.source.download_media %}
+<span class="error-text"><i class="fas fa-times" title="Not downloading media for this source"></i> Disabled at source</span>
+{% elif not m.has_metadata %}
+<i class="far fa-clock" title="Waiting for metadata"></i> Fetching metadata
 {% elif m.can_download %}
-<i class="far fa-clock" title="Waiting to download or downloading"></i> {{ m.published|date:'Y-m-d' }}
+<i class="far fa-clock" title="Waiting to download or downloading"></i> Downloading
 {% else %}
 <span class="error-text"><i class="fas fa-exclamation-triangle" title="No matching formats to download"></i> No matching formats</span>
 {% endif %}
@@ -61,6 +61,10 @@
 <td class="hide-on-small-only">Index schedule</td>
 <td><span class="hide-on-med-and-up">Index schedule<br></span><strong>{{ source.get_index_schedule_display }}</strong></td>
 </tr>
+<tr title="Download media from this source">
+<td class="hide-on-small-only">Download media?</td>
+<td><span class="hide-on-med-and-up">Download media?<br></span><strong>{% if source.download_media %}<i class="fas fa-check"></i>{% else %}<i class="fas fa-times"></i>{% endif %}</strong></td>
+</tr>
 <tr title="When then source was created locally in TubeSync">
 <td class="hide-on-small-only">Created</td>
 <td><span class="hide-on-med-and-up">Created<br></span><strong>{{ source.created|date:'Y-m-d H:i:s' }}</strong></td>
@@ -274,9 +274,9 @@ class AddSourceView(CreateView):
     template_name = 'sync/source-add.html'
     model = Source
     fields = ('source_type', 'key', 'name', 'directory', 'media_format',
-              'index_schedule', 'download_cap', 'delete_old_media', 'days_to_keep',
-              'source_resolution', 'source_vcodec', 'source_acodec', 'prefer_60fps',
-              'prefer_hdr', 'fallback', 'copy_thumbnails', 'write_nfo')
+              'index_schedule', 'download_media', 'download_cap', 'delete_old_media',
+              'days_to_keep', 'source_resolution', 'source_vcodec', 'source_acodec',
+              'prefer_60fps', 'prefer_hdr', 'fallback', 'copy_thumbnails', 'write_nfo')
     errors = {
         'invalid_media_format': _('Invalid media format, the media format contains '
                                   'errors or is empty. Check the table at the end of '
@@ -365,9 +365,9 @@ class UpdateSourceView(UpdateView):
     template_name = 'sync/source-update.html'
     model = Source
     fields = ('source_type', 'key', 'name', 'directory', 'media_format',
-              'index_schedule', 'download_cap', 'delete_old_media', 'days_to_keep',
-              'source_resolution', 'source_vcodec', 'source_acodec', 'prefer_60fps',
-              'prefer_hdr', 'fallback', 'copy_thumbnails', 'write_nfo')
+              'index_schedule', 'download_media', 'download_cap', 'delete_old_media',
+              'days_to_keep', 'source_resolution', 'source_vcodec', 'source_acodec',
+              'prefer_60fps', 'prefer_hdr', 'fallback', 'copy_thumbnails', 'write_nfo')
     errors = {
         'invalid_media_format': _('Invalid media format, the media format contains '
                                   'errors or is empty. Check the table at the end of '
@@ -37,7 +37,8 @@ def get_media_info(url):
         'skip_download': True,
         'forcejson': True,
         'simulate': True,
-        'logger': log
+        'logger': log,
+        'extract_flat': True,
     })
     response = {}
     with youtube_dl.YoutubeDL(opts) as y:
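The `'extract_flat': True` option added above makes youtube-dl return playlist or channel entries without fetching full metadata for every video, which matches the flattened `index_media()` in models.py that now just reads `response.get('entries', [])`. A standalone sketch of what flat extraction looks like; the URL is only an example and the keys available on each flat entry vary by extractor:

```python
# Standalone sketch of youtube-dl flat extraction; the playlist URL is an example
# and the fields present on each flat entry depend on the extractor.
import youtube_dl

opts = {
    'skip_download': True,
    'extract_flat': True,  # list entries without resolving each video
    'quiet': True,
}
with youtube_dl.YoutubeDL(opts) as y:
    info = y.extract_info('https://www.youtube.com/playlist?list=EXAMPLE',
                          download=False)

# For playlists and channels the response is a dict with an 'entries' list,
# which is what the new Source.index_media() reads.
for entry in info.get('entries', []):
    print(entry.get('id'), entry.get('title'))
```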
@@ -6,7 +6,7 @@ CONFIG_BASE_DIR = BASE_DIR
 DOWNLOADS_BASE_DIR = BASE_DIR
 
 
-VERSION = 0.8
+VERSION = 0.9
 SECRET_KEY = ''
 DEBUG = False
 ALLOWED_HOSTS = []
@@ -114,6 +114,9 @@ Disallow: /
 '''.strip()
 
 
+X_FRAME_OPTIONS = 'SAMEORIGIN'
+
+
 HEALTHCHECK_FIREWALL = True
 HEALTHCHECK_ALLOWED_IPS = ('127.0.0.1',)
 
@@ -149,7 +152,7 @@ YOUTUBE_DEFAULTS = {
 }
 
 
-MEDIA_FORMATSTR_DEFAULT = '{yyyymmdd}_{source}_{title}_{key}_{format}.{ext}'
+MEDIA_FORMATSTR_DEFAULT = '{yyyy_mm_dd}_{source}_{title}_{key}_{format}.{ext}'
 
 
 try:
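The renamed `{yyyy_mm_dd}` placeholder in `MEDIA_FORMATSTR_DEFAULT` above can be sanity-checked with plain `str.format()`; the field values below are invented purely to show how the placeholders expand:

```python
# Hypothetical expansion of the new default media format string; all field values
# below are invented for illustration.
MEDIA_FORMATSTR_DEFAULT = '{yyyy_mm_dd}_{source}_{title}_{key}_{format}.{ext}'

example = MEDIA_FORMATSTR_DEFAULT.format(
    yyyy_mm_dd='2021-02-14',
    source='test-channel',
    title='some-video',
    key='abc123xyz78',
    format='1080p',
    ext='mkv',
)
print(example)  # 2021-02-14_test-channel_some-video_abc123xyz78_1080p.mkv
```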