draft: string processing
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
@@ -2,10 +2,12 @@ from __future__ import annotations
 from collections import defaultdict
 from enum import Enum
-from typing import List, Dict, Set, Tuple, Optional, Iterable
-from urllib.parse import urlparse
-from dataclasses import dataclass
+from typing import List, Dict, Set, Tuple, Optional, Iterable, Generator
+from urllib.parse import urlparse, ParseResult
+from dataclasses import dataclass, field
+from functools import cached_property

 from ..utils import generate_id
 from ..utils.enums.source import SourcePages, SourceTypes
 from ..utils.config import youtube_settings
 from ..utils.string_processing import hash_url
@@ -17,25 +19,21 @@ from .collection import Collection


-class Source(OuterProxy):
+@dataclass
+class Source:
     url: str
     page_enum: SourcePages
     referrer_page: SourcePages
-    audio_url: Optional[str]
+    audio_url: str
+    id: int = field(default_factory=generate_id)
+    additional_data: dict = field(default_factory=dict)

-    _default_factories = {
-        "audio_url": lambda: None,
-    }
-
-    # This is automatically generated
-    def __init__(self, page_enum: SourcePages, url: str, referrer_page: SourcePages = None, audio_url: str = None,
-                 **kwargs) -> None:
-
-        if referrer_page is None:
-            referrer_page = page_enum
-
-        super().__init__(url=url, page_enum=page_enum, referrer_page=referrer_page, audio_url=audio_url, **kwargs)
+    def __post_init__(self):
+        self.referrer_page = self.referrer_page or self.page_enum
+
+    @cached_property
+    def parsed_url(self) -> ParseResult:
+        return urlparse(self.url)

     @classmethod
     def match_url(cls, url: str, referrer_page: SourcePages) -> Optional["Source"]:
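Not part of the diff: a minimal usage sketch of the reworked dataclass-based Source. The top-level package name and the SourcePages.YOUTUBE member are assumptions inferred from the relative imports above, and all argument values are placeholders.

# Sketch only, assuming the package layout implied by the relative imports.
from music_kraken.objects.source import Source
from music_kraken.utils.enums.source import SourcePages

src = Source(
    url="https://www.youtube.com/watch?v=xyz",  # placeholder URL
    page_enum=SourcePages.YOUTUBE,              # assumed enum member
    referrer_page=None,                         # __post_init__ falls back to page_enum
    audio_url=None,
)

assert src.referrer_page == src.page_enum
print(src.parsed_url.netloc)  # urlparse() runs once, then cached_property caches the result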
@@ -122,16 +120,23 @@ class Source(OuterProxy):
     homepage = property(fget=lambda self: SourcePages.get_homepage(self.page_enum))


-class SourceCollection(Collection):
+class SourceCollection:
     _page_to_source_list: Dict[SourcePages, List[Source]]

     def __init__(self, data: Optional[Iterable[Source]] = None, **kwargs):
-        self._page_to_source_list: Dict[SourcePages, List[Source]] = defaultdict(list)
+        self._page_to_source_list = defaultdict(list)

-        super().__init__(data=data, **kwargs)
+    def get_sources(self, *source_pages: List[Source]) -> Generator[Source]:
+        for page in source_pages:
+            yield from self._page_to_source_list[page]

-    def _map_element(self, __object: Source, **kwargs):
-        super()._map_element(__object, **kwargs)
+    def append(self, source: Source):
+        pass

-        self._page_to_source_list[__object.page_enum].append(__object)
+    def extend(self, sources: Iterable[Source]):
+        for source in sources:
+            self.append(source)

     @property
     def source_pages(self) -> Set[SourcePages]:
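Also not part of the diff: a sketch of the interface the reworked SourceCollection exposes, reusing the src object from the sketch above. Note that in this draft append() is still a stub (pass), so _page_to_source_list is not populated yet; the loop shows the intended reading path once it is.

# Sketch only; module path and SourcePages.YOUTUBE are assumptions as above.
from music_kraken.objects.source import SourceCollection

collection = SourceCollection()
collection.extend([src])   # extend() delegates to append() for every Source

# get_sources() yields the sources filed under the requested pages, in order.
for source in collection.get_sources(SourcePages.YOUTUBE):
    print(source.url)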