draft: rewrite of interface
commit 130f5edcfe
parent 636645e862
@@ -1,14 +1,15 @@
-import mutagen
-from mutagen.id3 import ID3, Frame, APIC, USLT
+import logging
 from pathlib import Path
 from typing import List
-import logging
 
+import mutagen
+from mutagen.id3 import APIC, ID3, USLT, Frame
 from PIL import Image
 
-from ..utils.config import logging_settings, main_settings
-from ..objects import Song, Target, Metadata
-from ..objects.metadata import Mapping
 from ..connection import Connection
+from ..objects import Metadata, Song, Target
+from ..objects.metadata import Mapping
+from ..utils.config import logging_settings, main_settings
 
 LOGGER = logging_settings["tagging_logger"]
@@ -105,7 +106,7 @@ def write_metadata_to_target(metadata: Metadata, target: Target, song: Song):
         APIC(
             encoding=0,
             mime="image/jpeg",
-            type=3,
+            type=mutagen.id3.PictureType.COVER_FRONT,
             desc=u"Cover",
             data=converted_target.read_bytes(),
         )
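Note on the hunk above: mutagen.id3.PictureType.COVER_FRONT is mutagen's named constant for the front-cover picture type and carries the same integer value (3) that was previously hard-coded. A minimal standalone sketch of attaching the same frame outside the project; file names are placeholders and the file is assumed to already carry an ID3 tag:

    import mutagen.id3
    from mutagen.id3 import APIC, ID3

    # "song.mp3" and "cover.jpg" are placeholder paths, not project files.
    tags = ID3("song.mp3")
    tags.add(APIC(
        encoding=0,                                 # Latin-1 encoding for desc, as in the diff
        mime="image/jpeg",
        type=mutagen.id3.PictureType.COVER_FRONT,   # named constant, same value as the old magic 3
        desc="Cover",
        data=open("cover.jpg", "rb").read(),
    ))
    tags.save()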
@@ -1,7 +1,9 @@
 from __future__ import annotations
 
+import copy
 import re
 from collections import defaultdict
+from dataclasses import dataclass, field
 from pathlib import Path
 from typing import (Any, Callable, Dict, Generator, Generic, Hashable, List,
                     Optional, Tuple, TypeVar, Union)
@@ -26,7 +28,6 @@ class HumanIO:
     def not_found(key: Any) -> None:
         return None
 
-
 class Option(Generic[P]):
     """
     This could represent a data object, a string or a page.
@@ -166,16 +167,59 @@ class Select(Generic[P]):
         return "\n".join(str(option) for option in self)
 
 
-class OptionGroup(Option[P], Select[P]):
-    ALPHABET: str = "abcdefghijklmnopqrstuvwxyz"
-    ATTRIBUTES_FORMATTING: Tuple[str, ...] = ("alphabetic_index", "value")
-
-    TEXT_TEMPLATE: str = f"{BColors.HEADER.value}{{alphabetic_index}}) {{value}}{BColors.ENDC.value}"
-
-    @property
-    def alphabetic_index(self) -> str:
-        return self.ALPHABET[self.index % len(self.ALPHABET)]
-
-    def __init__(self, value: P, data: Generator[P, None, None], **kwargs):
-        super(OptionGroup, self).__init__(value=value, data=data, **kwargs)
+class Node(Generator[P]):
+    def __init__(
+        self,
+        value: Optional[P] = None,
+        children: List[Node[P]] = None,
+        parent: Node[P] = None,
+        **kwargs
+    ):
+        self.value = value
+        self.depth = 0
+        self.same_level_index: int = 0
+
+        self.children: List[Node[P]] = kwargs.get("children", [])
+        self.parent: Optional[Node[P]] = kwargs.get("parent", None)
+
+        super(Node, self).__init__(**kwargs)
+
+    def hash_key(self, key: Any) -> int:
+        try:
+            key = int(key)
+        except ValueError:
+            pass
+
+        if isinstance(key, str):
+            return hash(unify(key))
+
+        return hash(key)
+
+    @property
+    def is_root(self) -> bool:
+        return self.parent is None
+
+    @property
+    def is_leaf(self) -> bool:
+        return not self.children
+
+    def __iter__(self, **kwargs) -> Generator[Node[P], None, None]:
+        _level_index_map: Dict[int, int] = kwargs.get("level_index_map", defaultdict(lambda: 0))
+
+        self.same_level_index = _level_index_map[self.depth]
+        yield self
+        _level_index_map[self.depth] += 1
+
+        for child in self.children:
+            child.depth = self.depth + 1
+
+            for node in child.__iter__(level_index_map=_level_index_map):
+                yield node
+
+    def __getitem__(self, key: Any) -> Option[P]:
+        pass
+
+    def __contains__(self, key: Any) -> bool:
+        if key in self.option:
+            return True
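The new Node class above replaces OptionGroup with a tree whose __iter__ walks depth-first while handing out a per-level counter (same_level_index) from a shared defaultdict. A simplified, self-contained sketch of just that traversal; it drops the Generator[P] base class, the kwargs plumbing and the Option/Select integration, so it is not the project's implementation:

    from collections import defaultdict
    from typing import Dict, Iterator, List, Optional

    class DemoNode:
        def __init__(self, value=None, children: Optional[List["DemoNode"]] = None):
            self.value = value
            self.children: List["DemoNode"] = children or []
            self.depth = 0
            self.same_level_index = 0

        def walk(self, _level_index_map: Optional[Dict[int, int]] = None) -> Iterator["DemoNode"]:
            # One shared counter per depth, so every node on the same level
            # gets a consecutive index regardless of its parent.
            if _level_index_map is None:
                _level_index_map = defaultdict(int)

            self.same_level_index = _level_index_map[self.depth]
            yield self
            _level_index_map[self.depth] += 1

            for child in self.children:
                child.depth = self.depth + 1
                yield from child.walk(_level_index_map)

    root = DemoNode("root", [DemoNode("a", [DemoNode("a1")]), DemoNode("b")])
    for node in root.walk():
        print(node.depth, node.same_level_index, node.value)
    # 0 0 root
    # 1 0 a
    # 2 0 a1
    # 1 1 b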
@@ -1,15 +1,16 @@
-from datetime import datetime
-from pathlib import Path
+import inspect
 import json
 import logging
-import inspect
+from datetime import datetime
+from pathlib import Path
 from typing import List, Union
 
-from .shared import DEBUG, DEBUG_LOGGING, DEBUG_DUMP, DEBUG_TRACE, DEBUG_OBJECT_TRACE, DEBUG_OBJECT_TRACE_CALLSTACK
 from .config import config, read_config, write_config
 from .enums.colors import BColors
-from .path_manager import LOCATIONS
 from .hacking import merge_args
+from .path_manager import LOCATIONS
+from .shared import (DEBUG, DEBUG_DUMP, DEBUG_LOGGING, DEBUG_OBJECT_TRACE,
+                     DEBUG_OBJECT_TRACE_CALLSTACK, DEBUG_TRACE)
 
 """
 IO functions
@@ -1,13 +1,12 @@
-from typing import Tuple, Union, Optional
-from pathlib import Path
 import string
 from functools import lru_cache
+from pathlib import Path
+from typing import Optional, Tuple, Union
+from urllib.parse import ParseResult, parse_qs, urlparse
 
-from transliterate.exceptions import LanguageDetectionError
-from transliterate import translit
 from pathvalidate import sanitize_filename
-from urllib.parse import urlparse, ParseResult, parse_qs
+from transliterate import translit
+from transliterate.exceptions import LanguageDetectionError
 
 COMMON_TITLE_APPENDIX_LIST: Tuple[str, ...] = (
     "(official video)",
@@ -180,6 +179,17 @@ def hash_url(url: Union[str, ParseResult]) -> str:
     r = r.lower().strip()
     return r
 
 
+def hash(self, key: Any) -> int:
+    try:
+        key = int(key)
+    except ValueError:
+        pass
+
+    if isinstance(key, str):
+        return hash(unify(key))
+
+    return hash(key)
+
 def remove_feature_part_from_track(title: str) -> str:
     if ")" != title[-1]:
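The module-level hash added above mirrors Node.hash_key: normalise the key (ints parsed from strings, strings run through unify) before hashing, presumably so that lookups by string or by int land on the same entry. A standalone sketch of the idea, under the assumption that unify is the module's own string-normalisation helper; the stand-in below only lowercases and strips, and the sketch uses a distinct name so it does not shadow the built-in hash:

    from typing import Any

    def _unify(text: str) -> str:
        # stand-in for the module's unify(); assumed to normalise strings
        return text.strip().lower()

    def normalized_hash(key: Any) -> int:
        try:
            key = int(key)              # "3" and 3 collapse to the same key
        except (ValueError, TypeError):
            pass

        if isinstance(key, str):
            return hash(_unify(key))    # "Foo " and "foo" collapse to the same key

        return hash(key)

    assert normalized_hash("3") == normalized_hash(3)
    assert normalized_hash("Foo ") == normalized_hash("foo")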