bugfixes
parent c37fa68937
commit ecc0c72cea
@@ -106,3 +106,6 @@ There are two bottlenecks with this approach though:
 
 **Progress**
 - There is a great site with a huge ISRC database [https://isrc.soundexchange.com/](https://isrc.soundexchange.com/).
+
+
+https://slavart.gamesdrive.net/
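As an aside on the ISRC note above: the MusicBrainz client this project already uses can also resolve ISRCs. A minimal sketch (the user agent string and the sample ISRC are placeholders, and the exact response shape should be checked against the musicbrainzngs docs):

```python
import musicbrainzngs

# MusicBrainz requires a user agent; any descriptive name/version works.
musicbrainzngs.set_useragent("music-downloader-sketch", "0.1")


def recordings_for_isrc(isrc: str) -> list:
    # Look the ISRC up and return whatever recordings MusicBrainz lists for it.
    result = musicbrainzngs.get_recordings_by_isrc(isrc)
    return result.get("isrc", {}).get("recording-list", [])


# print(recordings_for_isrc("USUM71703861"))  # illustrative ISRC only
```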
@@ -5,7 +5,6 @@ from mutagen.easyid3 import EasyID3
 from pydub import AudioSegment
 import logging
 
-from metadata import database
 import musify
 import youtube_music
 
@@ -15,57 +14,29 @@ https://mutagen.readthedocs.io/en/latest/user/id3.html
 
 # to get all valid keys
 from mutagen.easyid3 import EasyID3
+print("\n".join(EasyID3.valid_keys.keys()))
 print(EasyID3.valid_keys.keys())
 """
 
 
-def write_metadata(row, file_path):
-    if not os.path.exists(file_path):
-        logging.warning("something went really wrong")
-        return False
-
-    # only convert the file to the proper format if mutagen doesn't work with it due to time
-    try:
-        audiofile = EasyID3(file_path)
-    except mutagen.id3.ID3NoHeaderError:
-        AudioSegment.from_file(file_path).export(file_path, format="mp3")
-        audiofile = EasyID3(file_path)
-
-    valid_keys = list(EasyID3.valid_keys.keys())
-
-    for key in list(row.keys()):
-        if key in valid_keys and row[key] is not None:
-            if type(row[key]) != list:
-                row[key] = str(row[key])
-            audiofile[key] = row[key]
-        else:
-            logging.warning(key)
-
-    logging.info("saving")
-    audiofile.save(file_path, v1=2)
-
-
-def path_stuff(path: str, file_: str):
-    # returns true if it shouldn't be downloaded
-    if os.path.exists(file_):
-        logging.info(f"'{file_}' does already exist, thus not downloading.")
-        return True
-    os.makedirs(path, exist_ok=True)
-    return False
-
-
 class Download:
-    def __init__(self, proxies: dict = None, base_path: str = ""):
+    def __init__(self, database, logger: logging.Logger, proxies: dict = None, base_path: str = ""):
         if proxies is not None:
             musify.set_proxy(proxies)
 
+        self.database = database
+        self.logger = logger
+
         for row in database.get_tracks_to_download():
             row['artist'] = [i['name'] for i in row['artists']]
             row['file'] = os.path.join(base_path, row['file'])
             row['path'] = os.path.join(base_path, row['path'])
 
-            if path_stuff(row['path'], row['file']):
-                write_metadata(row, row['file'])
+            if self.path_stuff(row['path'], row['file']):
+                self.write_metadata(row, row['file'])
                 continue
 
             download_success = None
@@ -76,10 +47,43 @@ class Download:
                 download_success = youtube_music.download(row)
 
             if download_success == -1:
-                logging.warning(f"couldn't download {row.url} from {row.src}")
+                self.logger.warning(f"couldn't download {row['url']} from {row['src']}")
                 continue
 
-            write_metadata(row, row['file'])
+            self.write_metadata(row, row['file'])
 
+    def write_metadata(self, row, file_path):
+        if not os.path.exists(file_path):
+            self.logger.warning("something went really wrong")
+            return False
+
+        # only convert the file to the proper format if mutagen doesn't work with it due to time
+        try:
+            audiofile = EasyID3(file_path)
+        except mutagen.id3.ID3NoHeaderError:
+            AudioSegment.from_file(file_path).export(file_path, format="mp3")
+            audiofile = EasyID3(file_path)
+
+        valid_keys = list(EasyID3.valid_keys.keys())
+
+        for key in list(row.keys()):
+            if key in valid_keys and row[key] is not None:
+                if type(row[key]) != list:
+                    row[key] = str(row[key])
+                audiofile[key] = row[key]
+            else:
+                self.logger.warning(key)
+
+        self.logger.info("saving")
+        audiofile.save(file_path, v1=2)
+
+    def path_stuff(self, path: str, file_: str):
+        # returns true if it shouldn't be downloaded
+        if os.path.exists(file_):
+            self.logger.info(f"'{file_}' does already exist, thus not downloading.")
+            return True
+        os.makedirs(path, exist_ok=True)
+        return False
+
 
 if __name__ == "__main__":
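For readers skimming the write_metadata method moved into the class above: the tagging approach (EasyID3 first, a pydub re-encode as fallback) can be tried in isolation. A minimal sketch, not the project's code, assuming ffmpeg is available for pydub; the path and tag values are placeholders:

```python
import mutagen.id3
from mutagen.easyid3 import EasyID3
from pydub import AudioSegment


def tag_file(file_path: str, tags: dict) -> None:
    # Open with EasyID3; if the file has no ID3 header, re-encode it to mp3 first.
    try:
        audiofile = EasyID3(file_path)
    except mutagen.id3.ID3NoHeaderError:
        AudioSegment.from_file(file_path).export(file_path, format="mp3")
        audiofile = EasyID3(file_path)

    valid_keys = set(EasyID3.valid_keys.keys())
    for key, value in tags.items():
        if key in valid_keys and value is not None:
            # EasyID3 accepts strings or lists of strings.
            audiofile[key] = value if isinstance(value, list) else str(value)

    audiofile.save(file_path, v1=2)  # v1=2 also writes an ID3v1 tag


# tag_file("song.mp3", {"title": "Example", "artist": ["Somebody"], "tracknumber": "1"})
```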
@@ -3,17 +3,18 @@ import logging
 
 import musify
 import youtube_music
-from metadata import database
 
 
 class Download:
-    def __init__(self, metadata_csv: str = ".cache1.csv", proxies: dict = None) -> None:
+    def __init__(self, database, logger: logging.Logger, proxies: dict = None) -> None:
+        self.database = database
+        self.logger = logger
         if proxies is not None:
             musify.set_proxy(proxies)
 
         self.urls = []
 
-        for row in database.get_tracks_to_download():
+        for row in self.database.get_tracks_to_download():
            row['artists'] = [artist['name'] for artist in row['artists']]
 
            id_ = row['id']
@@ -36,10 +37,10 @@ class Download:
                 self.add_url(musify_url, 'musify', id_)
                 continue
 
-            logging.warning(f"Didn't find any sources for {row['title']}")
+            self.logger.warning(f"Didn't find any sources for {row['title']}")
 
     def add_url(self, url: str, src: str, id_: str):
-        database.set_download_data(id_, url, src)
+        self.database.set_download_data(id_, url, src)
 
 
 if __name__ == "__main__":
src/main.py | 39
@@ -1,3 +1,5 @@
+from metadata.database import Database
+from metadata.download import MetadataDownloader
 import metadata.download
 import metadata.metadata
 import download_links
@@ -6,12 +8,19 @@ import download
 
 import logging
 import os
+import tempfile
 
+logging.basicConfig(level=logging.INFO)
 
-TEMP = "temp"
-STEP_ONE_CACHE = ".cache1.csv"
-STEP_TWO_CACHE = ".cache2.csv"
-STEP_THREE_CACHE = ".cache3.csv"
+TEMP_FOLDER = "music-downloader"
+DATABASE_FILE = "metadata.db"
+DATABASE_STRUCTURE_FILE = "database_structure.sql"
+
+DATABASE_LOGGER = logging.getLogger("database")
+METADATA_DOWNLOAD_LOGGER = logging.getLogger("metadata-download")
+URL_DOWNLOAD_LOGGER = logging.getLogger("ling-download")
+PATH_LOGGER = logging.getLogger("create-paths")
+DOWNLOAD_LOGGER = logging.getLogger("download")
 
 NOT_A_GENRE = ".", "..", "misc_scripts", "Music", "script", ".git", ".idea"
 MUSIC_DIR = os.path.expanduser('~/Music')
@@ -20,6 +29,16 @@ TOR = False
 logger = logging.getLogger()
 logger.level = logging.DEBUG
 
+temp_dir = os.path.join(tempfile.gettempdir(), TEMP_FOLDER)
+if not os.path.exists(temp_dir):
+    os.mkdir(temp_dir)
+
+database = Database(os.path.join(temp_dir, DATABASE_FILE),
+                    os.path.join(temp_dir, DATABASE_STRUCTURE_FILE), DATABASE_LOGGER,
+                    reset_anyways=False)
+
+
+
 
 def get_existing_genre():
     valid_directories = []
@@ -31,7 +50,7 @@ def get_existing_genre():
 
 
 def search_for_metadata(query: str):
-    search = metadata.metadata.Search(query=query, temp=TEMP)
+    search = metadata.metadata.Search(query=query)
 
     print(search.options)
     while True:
@@ -86,20 +105,20 @@ def cli(start_at: int = 0):
     if start_at <= 0:
         search = search_for_metadata(query=input("initial query: "))
         logging.info("Starting Downloading of metadata")
-        metadata.download.download(search)
+        metadata_downloader = MetadataDownloader(database, METADATA_DOWNLOAD_LOGGER)
+        metadata_downloader.download(search)
 
     if start_at <= 1:
         logging.info("Fetching Download Links")
-        download_links.Download(proxies=proxies)
+        download_links.Download(database, METADATA_DOWNLOAD_LOGGER, proxies=proxies)
 
     if start_at <= 2:
         logging.info("creating Paths")
-        print(genre)
-        url_to_path.UrlPath(genre=genre)
+        url_to_path.UrlPath(database, PATH_LOGGER, genre=genre)
 
     if start_at <= 3:
         logging.info("starting to download the mp3's")
-        download.Download(proxies=proxies, base_path=MUSIC_DIR)
+        download.Download(database, DOWNLOAD_LOGGER, proxies=proxies, base_path=MUSIC_DIR)
 
 
 if __name__ == "__main__":
@@ -98,6 +98,7 @@ class Database:
                   musicbrainz_releasetrackid: str,
                   musicbrainz_albumid: str,
                   feature_aritsts: list,
+                  tracknumber: str = None,
                   track: str = None,
                   isrc: str = None
                   ):
@@ -111,8 +112,8 @@ class Database:
         self.connection.commit()
 
         # add track
-        query = "INSERT OR REPLACE INTO track (id, release_id, track, isrc) VALUES (?, ?, ?, ?);"
-        values = musicbrainz_releasetrackid, musicbrainz_albumid, track, isrc
+        query = "INSERT OR REPLACE INTO track (id, release_id, track, isrc, tracknumber) VALUES (?, ?, ?, ?, ?);"
+        values = musicbrainz_releasetrackid, musicbrainz_albumid, track, isrc, tracknumber
         self.cursor.execute(query, values)
         self.connection.commit()
 
@@ -139,6 +140,8 @@ SELECT DISTINCT
                 )
             ),
             'id', track.id,
+            'tracknumber', track.tracknumber,
+            'titlesort ', track.tracknumber,
             'musicbrainz_releasetrackid', track.id,
             'musicbrainz_albumid', release_.id,
             'title', track.track,
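The query change above leans on SQLite's INSERT OR REPLACE: writing a row with an existing primary key replaces that row instead of erroring, which is presumably what lets the metadata step be re-run safely. A self-contained illustration of that behaviour (table and values are made up):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE track (id TEXT PRIMARY KEY, track TEXT, tracknumber TEXT)")

query = "INSERT OR REPLACE INTO track (id, track, tracknumber) VALUES (?, ?, ?);"
conn.execute(query, ("abc", "Some Title", None))
conn.execute(query, ("abc", "Some Title", "3"))  # same id: the row is replaced, not duplicated
conn.commit()

print(conn.execute("SELECT * FROM track").fetchall())  # [('abc', 'Some Title', '3')]
```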
src/metadata/database_structure.sql | 56 (new file)
@@ -0,0 +1,56 @@
+DROP TABLE IF EXISTS artist;
+CREATE TABLE artist (
+    id TEXT PRIMARY KEY NOT NULL,
+    name TEXT
+);
+
+DROP TABLE IF EXISTS artist_release_group;
+CREATE TABLE artist_release_group (
+    artist_id TEXT NOT NULL,
+    release_group_id TEXT NOT NULL
+);
+
+DROP TABLE IF EXISTS artist_track;
+CREATE TABLE artist_track (
+    artist_id TEXT NOT NULL,
+    track_id TEXT NOT NULL
+);
+
+DROP TABLE IF EXISTS release_group;
+CREATE TABLE release_group (
+    id TEXT PRIMARY KEY NOT NULL,
+    albumartist TEXT,
+    albumsort INT,
+    musicbrainz_albumtype TEXT,
+    compilation TEXT,
+    album_artist_id TEXT
+);
+
+DROP TABLE IF EXISTS release_;
+CREATE TABLE release_ (
+    id TEXT PRIMARY KEY NOT NULL,
+    release_group_id TEXT NOT NULL,
+    title TEXT,
+    copyright TEXT,
+    album_status TEXT,
+    language TEXT,
+    year TEXT,
+    date TEXT,
+    country TEXT,
+    barcode TEXT
+);
+
+DROP TABLE IF EXISTS track;
+CREATE TABLE track (
+    id TEXT PRIMARY KEY NOT NULL,
+    downloaded BOOLEAN NOT NULL DEFAULT 0,
+    release_id TEXT NOT NULL,
+    track TEXT,
+    tracknumber TEXT,
+    isrc TEXT,
+    genre TEXT,
+    path TEXT,
+    file TEXT,
+    url TEXT,
+    src TEXT
+);
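How this new database_structure.sql is consumed is not visible in this diff, but the reset_anyways flag passed to Database in main.py suggests the whole script is replayed to rebuild the schema. A hypothetical sketch of that general approach (function and variable names are illustrative, not the project's API):

```python
import sqlite3


def reset_schema(db_path: str, structure_path: str) -> sqlite3.Connection:
    # Replays the DROP TABLE IF EXISTS / CREATE TABLE statements,
    # wiping and recreating every table in one go.
    connection = sqlite3.connect(db_path)
    with open(structure_path, "r", encoding="utf-8") as f:
        connection.executescript(f.read())
    connection.commit()
    return connection


# connection = reset_schema("metadata.db", "database_structure.sql")
```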
@@ -2,7 +2,11 @@ from typing import List
 import musicbrainzngs
 import logging
 
-from object_handeling import get_elem_from_obj, parse_music_brainz_date
+try:
+    from object_handeling import get_elem_from_obj, parse_music_brainz_date
+
+except ModuleNotFoundError:
+    from metadata.object_handeling import get_elem_from_obj, parse_music_brainz_date
 
 # I don't know if it would be feesable to set up my own mb instance
 # https://github.com/metabrainz/musicbrainz-docker
@@ -81,9 +85,10 @@ class MetadataDownloader:
                 database,
                 logger,
                 musicbrainz_releasegroupid: str,
-                artists = [],
+                artists=[],
                 albumsort: int = None,
-                only_download_distinct_releases: bool = True
+                only_download_distinct_releases: bool = True,
+                fetch_further: bool = True
                 ):
             self.database = database
             self.logger = logger
@@ -117,6 +122,9 @@ class MetadataDownloader:
 
             self.save()
 
+            if not fetch_further:
+                return
+
             if only_download_distinct_releases:
                 self.append_distinct_releases(release_datas)
             else:
@@ -142,7 +150,8 @@ class MetadataDownloader:
             for existing_artist in self.artists:
                 if artist_id == existing_artist.musicbrainz_artistid:
                     return existing_artist
-            new_artist = Artist(artist_id, release_groups=[self], new_release_groups=False)
+            new_artist = MetadataDownloader.Artist(self.database, self.logger, artist_id, release_groups=[self],
+                                                   new_release_groups=False)
             self.artists.append(new_artist)
             return new_artist
 
@@ -150,7 +159,8 @@ class MetadataDownloader:
             musicbrainz_albumid = get_elem_from_obj(release_data, ['id'])
             if musicbrainz_albumid is None:
                 return
-            self.releases.append(MetadataDownloader.Release(self.database, self.logger, musicbrainz_albumid, release_group=self))
+            self.releases.append(
+                MetadataDownloader.Release(self.database, self.logger, musicbrainz_albumid, release_group=self))
 
         def append_distinct_releases(self, release_datas: List[dict]):
             titles = {}
@@ -174,7 +184,8 @@ class MetadataDownloader:
                 database,
                 logger,
                 musicbrainz_albumid: str,
-                release_group = None
+                release_group=None,
+                fetch_furter: bool = True
                 ):
             self.database = database
             self.logger = logger
@@ -186,10 +197,16 @@ class MetadataDownloader:
             self.release_group = release_group
             self.tracklist = []
 
-            result = musicbrainzngs.get_release_by_id(self.musicbrainz_albumid, includes=["recordings", "labels"])
+            result = musicbrainzngs.get_release_by_id(self.musicbrainz_albumid,
+                                                      includes=["recordings", "labels", "release-groups"])
             release_data = get_elem_from_obj(result, ['release'], return_if_none={})
             label_data = get_elem_from_obj(release_data, ['label-info-list'], return_if_none={})
             recording_datas = get_elem_from_obj(release_data, ['medium-list', 0, 'track-list'], return_if_none=[])
+            release_group_data = get_elem_from_obj(release_data, ['release-group'], return_if_none={})
+            if self.release_group is None:
+                self.release_group = MetadataDownloader.ReleaseGroup(self.database, self.logger,
+                                                                     musicbrainz_releasegroupid=get_elem_from_obj(
+                                                                         release_group_data, ['id']), fetch_further=False)
 
             self.title = get_elem_from_obj(release_data, ['title'])
             self.copyright = get_elem_from_obj(label_data, [0, 'label', 'name'])
@@ -202,7 +219,8 @@ class MetadataDownloader:
             self.barcode = get_elem_from_obj(release_data, ['barcode'])
 
             self.save()
-            self.append_recordings(recording_datas)
+            if fetch_furter:
+                self.append_recordings(recording_datas)
 
         def __str__(self):
             return f"{self.title} ©{self.copyright} {self.album_status}"
@@ -223,12 +241,14 @@ class MetadataDownloader:
             )
 
         def append_recordings(self, recording_datas: dict):
-            for recording_data in recording_datas:
+            for i, recording_data in enumerate(recording_datas):
                 musicbrainz_releasetrackid = get_elem_from_obj(recording_data, ['recording', 'id'])
                 if musicbrainz_releasetrackid is None:
                     continue
 
-                self.tracklist.append(MetadataDownloader.Track(self.database, self.logger, musicbrainz_releasetrackid, self))
+                self.tracklist.append(
+                    MetadataDownloader.Track(self.database, self.logger, musicbrainz_releasetrackid, self,
+                                             track_number=str(i + 1)))
 
     class Track:
         def __init__(
@@ -236,7 +256,8 @@ class MetadataDownloader:
                 database,
                 logger,
                 musicbrainz_releasetrackid: str,
-                release = None
+                release=None,
+                track_number: str = None
                 ):
             self.database = database
             self.logger = logger
@@ -249,10 +270,18 @@ class MetadataDownloader:
             self.release = release
             self.artists = []
 
+            self.track_number = track_number
+
             result = musicbrainzngs.get_recording_by_id(self.musicbrainz_releasetrackid,
                                                         includes=["artists", "releases", "recording-rels", "isrcs",
                                                                   "work-level-rels"])
             recording_data = result['recording']
+            release_data = get_elem_from_obj(recording_data, ['release-list', -1])
+            if self.release is None:
+                self.release = MetadataDownloader.Release(self.database, self.logger,
+                                                          get_elem_from_obj(release_data, ['id']), fetch_furter=False)
+
+
             for artist_data in get_elem_from_obj(recording_data, ['artist-credit'], return_if_none=[]):
                 self.append_artist(get_elem_from_obj(artist_data, ['artist', 'id']))
 
@@ -271,6 +300,7 @@ class MetadataDownloader:
                 musicbrainz_releasetrackid=self.musicbrainz_releasetrackid,
                 musicbrainz_albumid=self.release.musicbrainz_albumid,
                 feature_aritsts=[artist.musicbrainz_artistid for artist in self.artists],
+                tracknumber=self.track_number,
                 track=self.title,
                 isrc=self.isrc
             )
@@ -316,14 +346,15 @@ if __name__ == "__main__":
     import database
 
     database_ = database.Database(os.path.join(temp_dir, "metadata.db"),
                                   os.path.join(temp_dir, "database_structure.sql"), db_logger,
                                   reset_anyways=True)
 
     download_logger = logging.getLogger("metadata downloader")
     download_logger.setLevel(logging.INFO)
 
     downloader = MetadataDownloader(database_, download_logger)
 
-    downloader.download({'id': '5cfecbe4-f600-45e5-9038-ce820eedf3d1', 'type': 'artist'})
+    downloader.download({'id': 'd2006339-9e98-4624-a386-d503328eb854', 'type': 'track'})
+    # downloader.download({'id': 'cdd16860-35fd-46af-bd8c-5de7b15ebc31', 'type': 'release'})
     # download({'id': '4b9af532-ef7e-42ab-8b26-c466327cb5e0', 'type': 'release'})
     # download({'id': 'c24ed9e7-6df9-44de-8570-975f1a5a75d1', 'type': 'track'})
@@ -1,5 +1,6 @@
 from datetime import date
 
+
 def get_elem_from_obj(current_object, keys: list, after_process=lambda x: x, return_if_none=None):
     current_object = current_object
     for key in keys:
@@ -9,6 +10,7 @@ def get_elem_from_obj(current_object, keys: list, after_process=lambda x: x, ret
         return return_if_none
     return after_process(current_object)
 
+
 def parse_music_brainz_date(mb_date: str) -> date:
     year = 1
     month = 1
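For context, get_elem_from_obj walks a path of keys and indices through nested dicts and lists and falls back to return_if_none when any step is missing, which is why the MusicBrainz responses elsewhere in this commit can be unpacked without try/except noise. A simplified paraphrase plus a usage example (the data is made up; the real implementation lives in object_handeling.py and may differ in detail):

```python
def get_elem_from_obj(current_object, keys: list, after_process=lambda x: x, return_if_none=None):
    # Simplified paraphrase for illustration only.
    for key in keys:
        try:
            current_object = current_object[key]
        except (KeyError, IndexError, TypeError):
            return return_if_none
    return after_process(current_object)


release = {"release": {"title": "Example", "medium-list": [{"track-list": []}]}}
print(get_elem_from_obj(release, ["release", "title"]))                     # Example
print(get_elem_from_obj(release, ["release", "date"], return_if_none="?"))  # ?
print(get_elem_from_obj(release, ["release", "medium-list", 0, "track-list"], return_if_none=[]))  # []
```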
@@ -37,7 +37,6 @@ def get_string_for_option(option: dict) -> str:
 class Options:
     def __init__(self, results: list):
         self.results = results
-        print(results)
 
         self.artist_count = 0
         self.release_count = 0
@@ -69,7 +68,6 @@ class Options:
     def __str__(self) -> str:
         string = f"artists: {self.artist_count}; releases {self.release_count}; tracks {self.track_count}\n"
         for i, option in enumerate(self.result_list):
-            print(option)
             string += f"{i})\t{option['type']}:\t" + get_string_for_option(option)
         return string
 
@@ -1,9 +1,14 @@
 import logging
+import time
+
 import requests
 import bs4
 
 import phonetic_compares
 
+TRIES = 5
+TIMEOUT = 10
+
 session = requests.Session()
 session.headers = {
     "Connection": "keep-alive",
@@ -64,11 +69,18 @@ def download(row):
         return download_from_musify(file_, url)
 
 
-def get_soup_of_search(query: str):
+def get_soup_of_search(query: str, trie=0):
     url = f"https://musify.club/search?searchText={query}"
     logging.debug(f"Trying to get soup from {url}")
     r = session.get(url)
     if r.status_code != 200:
+        if r.status_code in [503] and trie < TRIES:
+            logging.warning(f"youtube blocked downloading. ({trie}-{TRIES})")
+            logging.warning(f"retrying in {TIMEOUT} seconds again")
+            time.sleep(TIMEOUT)
+            return get_soup_of_search(query, trie=trie+1)
+
+        logging.warning("too many tries, returning")
         raise ConnectionError(f"{r.url} returned {r.status_code}:\n{r.content}")
     return bs4.BeautifulSoup(r.content, features="html.parser")
 
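The retry added to get_soup_of_search above recurses with a fixed sleep between attempts; the same idea can be written as a loop, which avoids growing the call stack if the 503s persist. A hedged sketch of that variant (constants, session and URL handling are placeholders, not the project's code):

```python
import logging
import time

import requests

TRIES = 5
TIMEOUT = 10


def get_with_retry(session: requests.Session, url: str) -> requests.Response:
    # Retry only on 503 (the "temporarily blocked" case); fail fast on anything else.
    for attempt in range(TRIES):
        r = session.get(url)
        if r.status_code == 200:
            return r
        if r.status_code != 503:
            break
        logging.warning(f"got 503 ({attempt + 1}/{TRIES}), retrying in {TIMEOUT} seconds")
        time.sleep(TIMEOUT)
    raise ConnectionError(f"{url} returned {r.status_code}")


# soup = bs4.BeautifulSoup(get_with_retry(session, url).content, features="html.parser")
```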
@@ -1,20 +1,17 @@
 import os.path
-import json
+import logging
 
-from metadata import database
-
 
 class UrlPath:
-    def __init__(self, genre: str, temp: str = "temp", file: str = ".cache3.csv", step_two_file: str = ".cache2.csv"):
-        self.temp = temp
-        self.file = file
+    def __init__(self, database, logger: logging.Logger, genre: str):
+        self.database = database
+        self.logger = logger
 
         self.genre = genre
 
-        for row in database.get_tracks_without_filepath():
+        for row in self.database.get_tracks_without_filepath():
             file, path = self.get_path_from_row(row)
-            database.set_filepath(row['id'], file, path, genre)
+            self.database.set_filepath(row['id'], file, path, genre)
 
-
     def get_path_from_row(self, row):
         """
@@ -23,7 +20,9 @@ class UrlPath:
         :param row:
         :return: path:
         """
-        return os.path.join(self.get_genre(), self.get_artist(row), self.get_album(row), f"{self.get_song(row)}.mp3"), os.path.join(self.get_genre(), self.get_artist(row), self.get_album(row))
+        return os.path.join(self.get_genre(), self.get_artist(row), self.get_album(row),
+                            f"{self.get_song(row)}.mp3"), os.path.join(self.get_genre(), self.get_artist(row),
+                                                                       self.get_album(row))
 
     def escape_part(self, part: str):
         return part.replace("/", " ")