draft: rewrite of interface
parent 636645e862
commit 130f5edcfe
@@ -1,14 +1,15 @@
-import mutagen
-from mutagen.id3 import ID3, Frame, APIC, USLT
-import logging
-from pathlib import Path
-from typing import List
+import logging
+
+import mutagen
+from mutagen.id3 import APIC, ID3, USLT, Frame
+from PIL import Image
+
-from ..utils.config import logging_settings, main_settings
-from ..objects import Song, Target, Metadata
-from ..objects.metadata import Mapping
+from ..connection import Connection
+from ..objects import Metadata, Song, Target
+from ..objects.metadata import Mapping
+from ..utils.config import logging_settings, main_settings
 
 LOGGER = logging_settings["tagging_logger"]
 
@@ -105,7 +106,7 @@ def write_metadata_to_target(metadata: Metadata, target: Target, song: Song):
         APIC(
             encoding=0,
             mime="image/jpeg",
-            type=3,
+            type=mutagen.id3.PictureType.COVER_FRONT,
             desc=u"Cover",
             data=converted_target.read_bytes(),
         )
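Aside, not part of the commit: a minimal standalone sketch of what this change amounts to, using the named `PictureType.COVER_FRONT` constant instead of the bare magic number `3` when embedding a front cover with mutagen. The file names below are placeholders.

from pathlib import Path

import mutagen
from mutagen.id3 import APIC, ID3

# Placeholder file names, for illustration only.
tag = ID3("song.mp3")  # raises mutagen.id3.ID3NoHeaderError if the file has no ID3 tag yet
tag.add(
    APIC(
        encoding=0,                                # Latin-1 encoding for the description
        mime="image/jpeg",
        type=mutagen.id3.PictureType.COVER_FRONT,  # same numeric value as the old literal 3
        desc="Cover",
        data=Path("cover.jpg").read_bytes(),
    )
)
tag.save()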
@@ -1,7 +1,9 @@
 from __future__ import annotations
 
 import copy
+import re
+from collections import defaultdict
 from dataclasses import dataclass, field
 from pathlib import Path
 from typing import (Any, Callable, Dict, Generator, Generic, Hashable, List,
                     Optional, Tuple, TypeVar, Union)
@@ -26,7 +28,6 @@ class HumanIO:
     def not_found(key: Any) -> None:
         return None
 
-
 class Option(Generic[P]):
     """
     This could represent a data object, a string or a page.
@@ -166,16 +167,59 @@ class Select(Generic[P]):
         return "\n".join(str(option) for option in self)
 
 
-class OptionGroup(Option[P], Select[P]):
-    ALPHABET: str = "abcdefghijklmnopqrstuvwxyz"
-    ATTRIBUTES_FORMATTING: Tuple[str, ...] = ("alphabetic_index", "value")
+class Node(Generic[P]):
+    def __init__(
+        self,
+        value: Optional[P] = None,
+        children: List[Node[P]] = None,
+        parent: Node[P] = None,
+        **kwargs
+    ):
+        self.value = value
+        self.depth = 0
+        self.same_level_index: int = 0
 
-    TEXT_TEMPLATE: str = f"{BColors.HEADER.value}{{alphabetic_index}}) {{value}}{BColors.ENDC.value}"
-
+        self.children: List[Node[P]] = kwargs.get("children", [])
+        self.parent: Optional[Node[P]] = kwargs.get("parent", None)
+
+        super(Node, self).__init__(**kwargs)
+
+    def hash_key(self, key: Any) -> int:
+        try:
+            key = int(key)
+        except ValueError:
+            pass
+
+        if isinstance(key, str):
+            return hash(unify(key))
+
+        return hash(key)
+
     @property
-    def alphabetic_index(self) -> str:
-        return self.ALPHABET[self.index % len(self.ALPHABET)]
+    def is_root(self) -> bool:
+        return self.parent is None
 
-    def __init__(self, value: P, data: Generator[P, None, None], **kwargs):
-        super(OptionGroup, self).__init__(value=value, data=data, **kwargs)
+
+    @property
+    def is_leaf(self) -> bool:
+        return not self.children
+
+    def __iter__(self, **kwargs) -> Generator[Node[P], None, None]:
+        _level_index_map: Dict[int, int] = kwargs.get("level_index_map", defaultdict(lambda: 0))
+
+        self.same_level_index = _level_index_map[self.depth]
+        yield self
+        _level_index_map[self.depth] += 1
+
+        for child in self.children:
+            child.depth = self.depth + 1
+
+            for node in child.__iter__(level_index_map=_level_index_map):
+                yield node
+
+    def __getitem__(self, key: Any) -> Option[P]:
+        pass
+
+    def __contains__(self, key: Any) -> bool:
+        if key in self.option:
+            return True
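Usage note, not part of the commit: `Node.__iter__` above walks the tree depth-first while a shared per-level counter assigns each node its `same_level_index`. A self-contained sketch of that traversal pattern with a simplified stand-in class (names are illustrative, not from the repository):

from collections import defaultdict
from typing import Dict, Generator, List, Optional


class DemoNode:
    # Simplified stand-in for the Node class added in the hunk above.
    def __init__(self, value: str, children: Optional[List["DemoNode"]] = None):
        self.value = value
        self.children: List["DemoNode"] = children or []
        self.depth = 0
        self.same_level_index = 0

    def walk(self, level_index_map: Optional[Dict[int, int]] = None) -> Generator["DemoNode", None, None]:
        # One counter per depth level, shared across the whole traversal.
        level_index_map = level_index_map if level_index_map is not None else defaultdict(int)

        self.same_level_index = level_index_map[self.depth]
        yield self
        level_index_map[self.depth] += 1

        for child in self.children:
            child.depth = self.depth + 1
            yield from child.walk(level_index_map)


root = DemoNode("root", [DemoNode("a", [DemoNode("a1")]), DemoNode("b")])
for node in root.walk():
    print(f"{node.depth}.{node.same_level_index} {node.value}")
# prints: 0.0 root / 1.0 a / 2.0 a1 / 1.1 b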
@@ -1,15 +1,16 @@
-from datetime import datetime
-from pathlib import Path
+import inspect
 import json
 import logging
-import inspect
+from datetime import datetime
+from pathlib import Path
+from typing import List, Union
 
-from .shared import DEBUG, DEBUG_LOGGING, DEBUG_DUMP, DEBUG_TRACE, DEBUG_OBJECT_TRACE, DEBUG_OBJECT_TRACE_CALLSTACK
 from .config import config, read_config, write_config
 from .enums.colors import BColors
-from .path_manager import LOCATIONS
 from .hacking import merge_args
+from .path_manager import LOCATIONS
+from .shared import (DEBUG, DEBUG_DUMP, DEBUG_LOGGING, DEBUG_OBJECT_TRACE,
+                     DEBUG_OBJECT_TRACE_CALLSTACK, DEBUG_TRACE)
 
 """
 IO functions
@@ -125,4 +126,4 @@ def get_current_millis() -> int:
 
 
 def get_unix_time() -> int:
-    return int(datetime.now().timestamp())
+    return int(datetime.now().timestamp())
@@ -1,13 +1,12 @@
-from typing import Tuple, Union, Optional
-from pathlib import Path
 import string
+from functools import lru_cache
+from pathlib import Path
+from typing import Optional, Tuple, Union
+from urllib.parse import ParseResult, parse_qs, urlparse
 
-from transliterate.exceptions import LanguageDetectionError
-from transliterate import translit
 from pathvalidate import sanitize_filename
-from urllib.parse import urlparse, ParseResult, parse_qs
-
+from transliterate import translit
+from transliterate.exceptions import LanguageDetectionError
 
 COMMON_TITLE_APPENDIX_LIST: Tuple[str, ...] = (
     "(official video)",
@@ -180,6 +179,17 @@ def hash_url(url: Union[str, ParseResult]) -> str:
     r = r.lower().strip()
     return r
 
+def hash(self, key: Any) -> int:
+    try:
+        key = int(key)
+    except ValueError:
+        pass
+
+    if isinstance(key, str):
+        return hash(unify(key))
+
+    return hash(key)
+
 
 def remove_feature_part_from_track(title: str) -> str:
     if ")" != title[-1]:
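Closing note, not part of the commit: the helper added after `hash_url` mirrors `Node.hash_key` above: coerce numeric strings to `int`, normalize remaining strings, then hash. A self-contained sketch of that idea; the function name and the `unify` stand-in are illustrative, not the repository's own:

from typing import Any


def unify(text: str) -> str:
    # Stand-in for the project's unify(); the real one does more normalization.
    return text.strip().lower()


def normalized_hash(key: Any) -> int:
    # Coerce numeric strings to int and normalize text before hashing.
    try:
        key = int(key)
    except (TypeError, ValueError):
        pass

    if isinstance(key, str):
        return hash(unify(key))

    return hash(key)


assert normalized_hash("42") == normalized_hash(42)
assert normalized_hash("  FOO ") == normalized_hash("foo")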