Merge branch 'youtube_music' into experimental

This commit is contained in: commit 8da016d294

6 .vscode/settings.json (vendored)
@@ -10,5 +10,9 @@
         "test*.py"
     ],
     "python.testing.pytestEnabled": false,
-    "python.testing.unittestEnabled": true
+    "python.testing.unittestEnabled": true,
+    "[python]": {
+        "editor.defaultFormatter": "ms-python.autopep8"
+    },
+    "python.formatting.provider": "none"
 }
53 documentation/config.md (new file)
@@ -0,0 +1,53 @@
> This is bs, ima use dynaconf

# Concept

The core concept is to have instances of dataclasses that hold all values. On program start the values are simply overridden by those in the file.

## Dataclass Structure

You have one [File](#file) class, which contains a list of [Section](#section) classes.
Every [Section](#section) class contains a list of [SectionElement](#section-elements) classes.

# Classes

## File

`File` classes have one name, from which the path is generated:

```
{CONFIG_DIR}/{file_name}.conf
```

I also pass the config directory into the constructor, so that the module can be used fairly independently. Its default value, though, is the default config directory from `utils.path_manager`.

They contain a list of [ConfigElement](#config-elements)s, arguably the most important ones.

## Config Elements

# Config Syntax

- every line is stripped of all whitespace at the beginning and end

```
# a comment

config_name=some_value

# list
[config_name.list.start]
config_name=one list item
config_name=another list item
[config_name.list.end]

# dict
[config_name.dict.start]
one_key=one value item
another_key=another value item
[config_name.dict.end]
```

- empty lines will be ignored
- if `#` is at the beginning of a line, it will be ignored
- if a line contains neither a `\[.*\]` nor a `=`, it will raise a warning, but the line will be ignored (see the parsing sketch below)
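The rules above map naturally onto a small line-by-line parser. The following is a hypothetical sketch of how the stripping, comment, list and dict rules could be implemented; it is not part of this commit and all names in it are made up for illustration.

```python
# Hypothetical sketch of a parser for the syntax described above; not part of the actual module.
import logging
import re

SECTION_PATTERN = re.compile(r"^\[(?P<name>.+)\.(?P<kind>list|dict)\.(?P<edge>start|end)\]$")


def parse_config_text(text: str) -> dict:
    values: dict = {}
    current_name = None   # name of the list/dict currently being collected
    current_kind = None   # "list" or "dict"

    for raw_line in text.splitlines():
        line = raw_line.strip()               # strip whitespace at both ends
        if not line or line.startswith("#"):  # empty lines and comments are ignored
            continue

        match = SECTION_PATTERN.match(line)
        if match:
            if match["edge"] == "start":
                current_name, current_kind = match["name"], match["kind"]
                values[current_name] = [] if current_kind == "list" else {}
            else:
                current_name = current_kind = None
            continue

        if "=" not in line:
            # neither [...] nor = in the line: warn, then ignore it
            logging.warning("ignoring line without '=' or '[...]': %r", line)
            continue

        key, value = (part.strip() for part in line.split("=", 1))
        if current_kind == "list":
            values[current_name].append(value)
        elif current_kind == "dict":
            values[current_name][key] = value
        else:
            values[key] = value

    return values
```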
1516 documentation/html/youtube-music/index.html (new file)
File diff suppressed because it is too large.
211 documentation/html/youtube-music/search/01-search-request.json (new file)
@@ -0,0 +1,211 @@
// https://music.youtube.com/youtubei/v1/search?key=AIzaSyC9XL3ZjWddXya6X74dJoCTL-WEYFDNX30&prettyPrint=false
// ctoken could be short for continue token
{
"POST": {
"scheme": "https",
"host": "music.youtube.com",
"filename": "/youtubei/v1/search",
"query": {
// ctoken has the same value as continuation, you can just use an empty string, at least once
"ctoken": "Eq4DEhVTd2lzcyB1bmQgZGllIEFuZGVyZW4alANFZ1dLQVFJSUFVZ1VhZ3dRQXhBRUVBa1FDaEFGRUJXQ0FRdFlkbGxCZVVWbFpEbDJZNElCQzE4dFZURktTbFpRYjJNMGdnRUxaV0pRVDJaMFZWbGlkazJDQVF0dk9YQnFVVWxGWVY4eWQ0SUJDelJzWm5Nd2JWOXFUMFJuZ2dFTFFVUjZVM2x3WDNoRVpIT0NBUXMxV0V0SldrMXlTWEEwU1lJQkMzVk9TRk5YUm5aaVYxVTBnZ0VMZWxJNE4yMUpiWFE1U1hlQ0FRdFNjMjh0VDE5VlZERnFUWUlCQ3pOeFRsOXFhbVJyYW1FMGdnRUxkME4wTFZsUlgwaHVjRldDQVF0c1ZuZGZZUzE1YUhSTWE0SUJDMEZ5VFc4NVFrTlJWMnMwZ2dFTFExUTJRMjE1ZDFCRVFtLUNBUXROVm1SRVdtSkxiR1ZsWjRJQkMyNDNVbmhoU20wNU9HMUpnZ0VMUVhoMFgxSlJXWHBpVkhPQ0FRdHBhM05LTVdGWkxYRjVNSUlCQzFjMmRqQkJNMGR1WTBWdhjx6tAu",
"continuation": "Eq4DEhVTd2lzcyB1bmQgZGllIEFuZGVyZW4alANFZ1dLQVFJSUFVZ1VhZ3dRQXhBRUVBa1FDaEFGRUJXQ0FRdFlkbGxCZVVWbFpEbDJZNElCQzE4dFZURktTbFpRYjJNMGdnRUxaV0pRVDJaMFZWbGlkazJDQVF0dk9YQnFVVWxGWVY4eWQ0SUJDelJzWm5Nd2JWOXFUMFJuZ2dFTFFVUjZVM2x3WDNoRVpIT0NBUXMxV0V0SldrMXlTWEEwU1lJQkMzVk9TRk5YUm5aaVYxVTBnZ0VMZWxJNE4yMUpiWFE1U1hlQ0FRdFNjMjh0VDE5VlZERnFUWUlCQ3pOeFRsOXFhbVJyYW1FMGdnRUxkME4wTFZsUlgwaHVjRldDQVF0c1ZuZGZZUzE1YUhSTWE0SUJDMEZ5VFc4NVFrTlJWMnMwZ2dFTFExUTJRMjE1ZDFCRVFtLUNBUXROVm1SRVdtSkxiR1ZsWjRJQkMyNDNVbmhoU20wNU9HMUpnZ0VMUVhoMFgxSlJXWHBpVkhPQ0FRdHBhM05LTVdGWkxYRjVNSUlCQzFjMmRqQkJNMGR1WTBWdhjx6tAu",
"type": "next",
"itct": "CAwQybcCIhMIkL7e75uWgAMVHNQRCB2bYQhT", // you can get away with simply not sending this
"key": "AIzaSyC9XL3ZjWddXya6X74dJoCTL-WEYFDNX30", // can be gotten through index.html
"prettyPrint": "false"
},
"remote": {
"Address": "142.250.181.238:443"
}
}
}


{
"context": {
"client": {
"hl": "en",
"gl": "DE",
"remoteHost": "87.123.241.77",
"deviceMake": "",
"deviceModel": "",
"visitorData": "CgtiTUxaTHpoXzk1Zyia59WlBg%3D%3D",
"userAgent": "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/115.0,gzip(gfe)",
"clientName": "WEB_REMIX",
"clientVersion": "1.20230710.01.00",
"osName": "X11",
"osVersion": "",
"originalUrl": "https://music.youtube.com/",
"platform": "DESKTOP",
"clientFormFactor": "UNKNOWN_FORM_FACTOR",
"configInfo": {
"appInstallData": "CJrn1aUGEP61rwUQqcSvBRClma8FEL22rgUQ8LavBRCEtq8FEJLLrwUQvb3-EhCWzq8FEN62rwUQ65OuBRCMy68FEKXC_hIQ5LP-EhDDt_4SEOC2rwUQ1NOvBRCst68FEOe6rwUQos2vBRCMt68FEMy3_hIQ-LWvBRDyqK8FEMyu_hIQhtn-EhDqw68FEKqy_hIQ_bj9EhDM364FELiLrgUQ3M-vBRC0pq8FEOTO_hIQj8OvBRC0ya8F", // You can ignore those thingies, and simply replace them with empty stings
"coldConfigData": "CJrn1aUGEOu6rQUQ65OuBRC9tq4FEKT-rgUQ0puvBRC2nq8FEOiorwUQ8qivBRCaq68FEK-srwUQjLevBRDiuq8FEMq_rwUQnsevBRC0ya8FENfKrwUQjMuvBRCSy68FEKLNrwUQ3M-vBRCK0a8FEMjTrwUQ1NOvBRoyQU53R2I4WHo1cUc1NHdmSkloXzJVd0FoWks5RVZfRTdwZ2JYcDgtVlpDN3ZiUS1Rc2ciMkFOd0diOFh6NXFHNTR3ZkpJaF8yVXdBaFpLOUVWX0U3cGdiWHA4LVZaQzd2YlEtUXNnKjxDQU1TS0EwWGdwYW9Bc2dXX2dXZkJJOFNuUXE4RHhVVGtvTFFETE5IcjdZR3VWM2VZcDB2aVNlUHVRUT0%3D",
"coldHashData": "CJrn1aUGEhM2OTg5NTQ4NzgzNzUxODI2NDY4GJrn1aUGMjJBTndHYjhYejVxRzU0d2ZKSWhfMlV3QWhaSzlFVl9FN3BnYlhwOC1WWkM3dmJRLVFzZzoyQU53R2I4WHo1cUc1NHdmSkloXzJVd0FoWks5RVZfRTdwZ2JYcDgtVlpDN3ZiUS1Rc2dCPENBTVNLQTBYZ3Bhb0FzZ1dfZ1dmQkk4U25RcThEeFVUa29MUURMTkhyN1lHdVYzZVlwMHZpU2VQdVFRPQ%3D%3D",
"hotHashData": "CJrn1aUGEhM3ODUyNzA2MTg5MTM0NTU5OTc2GJrn1aUGKJTk_BIo3JP9EijGsv0SKKq0_RIonpH-Eiiarf4SKL29_hIo3c7-Eijkzv4SKKnT_hIo99f-EiiR2P4SKMPY_hIohtn-EiiX2f4SKI7a_hIoktr-EijI3P4SMjJBTndHYjhYejVxRzU0d2ZKSWhfMlV3QWhaSzlFVl9FN3BnYlhwOC1WWkM3dmJRLVFzZzoyQU53R2I4WHo1cUc1NHdmSkloXzJVd0FoWks5RVZfRTdwZ2JYcDgtVlpDN3ZiUS1Rc2dCKENBTVNHUTBQMklfNUZjb0FxRGtWQ28zaXpReUw3Z0hGa0FEaDBBST0%3D"
},
"userInterfaceTheme": "USER_INTERFACE_THEME_DARK",
"timeZone": "Atlantic/Jan_Mayen",
"browserName": "Firefox",
"browserVersion": "115.0",
"acceptHeader": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
"deviceExperimentId": "ChxOekkxTmpnek16UTRNVFl4TkRrek1ETTVOdz09EJrn1aUGGJrn1aUG",
"screenWidthPoints": 584,
"screenHeightPoints": 939,
"screenPixelDensity": 1,
"screenDensityFloat": 1,
"utcOffsetMinutes": 120,
"musicAppInfo": {
"pwaInstallabilityStatus": "PWA_INSTALLABILITY_STATUS_UNKNOWN",
"webDisplayMode": "WEB_DISPLAY_MODE_BROWSER",
"storeDigitalGoodsApiSupportStatus": {
"playStoreDigitalGoodsApiSupportStatus": "DIGITAL_GOODS_API_SUPPORT_STATUS_UNSUPPORTED"
}
}
},
"user": { "lockedSafetyMode": false },
"request": {
"useSsl": true,
"internalExperimentFlags": [],
"consistencyTokenJars": []
},
"adSignalsInfo": {
"params": [ // params are not necessarily needed
{ "key": "dt", "value": "1689613211113" },
{ "key": "flash", "value": "0" },
{ "key": "frm", "value": "0" },
{ "key": "u_tz", "value": "120" },
{ "key": "u_his", "value": "5" },
{ "key": "u_h", "value": "1024" },
{ "key": "u_w", "value": "1280" },
{ "key": "u_ah", "value": "1024" },
{ "key": "u_aw", "value": "1280" },
{ "key": "u_cd", "value": "24" },
{ "key": "bc", "value": "31" },
{ "key": "bih", "value": "939" },
{ "key": "biw", "value": "584" },
{ "key": "brdim", "value": "0,0,0,0,1280,0,1280,1024,584,939" },
{ "key": "vis", "value": "1" },
{ "key": "wgl", "value": "true" },
{ "key": "ca_type", "value": "image" }
]
}
}
}


{
"context": {
"client": {
"hl": "en",
"gl": "DE",
"remoteHost": "87.123.241.77",
"deviceMake": "",
"deviceModel": "",
"visitorData": "CgtiTUxaTHpoXzk1Zyia59WlBg%3D%3D",
"userAgent": "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/115.0,gzip(gfe)",
"clientName": "WEB_REMIX",
"clientVersion": "1.20230710.01.00",
"osName": "X11",
"osVersion": "",
"originalUrl": "https://music.youtube.com/",
"platform": "DESKTOP",
"clientFormFactor": "UNKNOWN_FORM_FACTOR",
"configInfo": {
"appInstallData": "",
"coldConfigData": "",
"coldHashData": "",
"hotHashData": ""
},
"userInterfaceTheme": "USER_INTERFACE_THEME_DARK",
"timeZone": "Atlantic/Jan_Mayen",
"browserName": "Firefox",
"browserVersion": "115.0",
"acceptHeader": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
"deviceExperimentId": "ChxOekkxTmpnek16UTRNVFl4TkRrek1ETTVOdz09EJrn1aUGGJrn1aUG",
"screenWidthPoints": 584,
"screenHeightPoints": 939,
"screenPixelDensity": 1,
"screenDensityFloat": 1,
"utcOffsetMinutes": 120,
"musicAppInfo": {
"pwaInstallabilityStatus": "PWA_INSTALLABILITY_STATUS_UNKNOWN",
"webDisplayMode": "WEB_DISPLAY_MODE_BROWSER",
"storeDigitalGoodsApiSupportStatus": {
"playStoreDigitalGoodsApiSupportStatus": "DIGITAL_GOODS_API_SUPPORT_STATUS_UNSUPPORTED"
}
}
},
"user": { "lockedSafetyMode": false },
"request": {
"useSsl": true,
"internalExperimentFlags": [],
"consistencyTokenJars": []
},
"adSignalsInfo": {
"params": []
}
}
}


-----


{
"context":
{
"client":
{
"hl":"en",
"gl":"DE",
"remoteHost":"87.123.241.85",
"deviceMake":"",
"deviceModel":"",
"visitorData":"CgtucS1ibEdPa045ZyiT4YWmBg%3D%3D",
"userAgent":"Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/115.0,gzip(gfe)",
"clientName":"WEB_REMIX",
"clientVersion":"1.20230724.00.00-canary_experiment",
"osName":"X11","osVersion":"","originalUrl":"https://music.youtube.com/?cbrd=1","platform":"DESKTOP","clientFormFactor":"UNKNOWN_FORM_FACTOR","configInfo":
{"appInstallData":"CJPhhaYGEJ3b_hIQsdWvBRC41a8FEL22rgUQ3ravBRD-ta8FEOe6rwUQw7f-EhDgtq8FEKnErwUQ6sOvBRCst68FEIXZ_hIQ5LP-EhDMrv4SELiLrgUQ65OuBRCMt68FEOPO_hIQwt7-EhDbz68FELTJrwUQ8qivBRD4ta8FEJbOrwUQzN-uBRCPw68FEP24_RIQhLavBRC1pq8FEKqy_hIQksuvBRCa0a8FEMy3_hIQjMuvBRCj1K8FEKXC_hIQ_eeoGBD51a8F","coldConfigData":"CJPhhaYGEOy6rQUQ65OuBRC9tq4FEKT-rgUQ6KivBRDyqK8FEIy3rwUQ4bqvBRDDxq8FEJ7HrwUQ88yvBRDbz68FEMDQrwUQmtGvBRDK068FENTTrwUQo9SvBRCx1a8FELjVrwUQ-dWvBRDZ168FEI7YrwUQ0NmvBRoyQU53R2I4V013TDV5bTJ1S0hPZndFWFZqcFB4b0l6MVRxcllyNFo2dDdKVGRTQjFFS3ciMkFOd0diOFdNd0w1eW0ydUtIT2Z3RVhWanBQeG9JejFUcXJZcjRaNnQ3SlRkU0IxRUt3KkhDQU1TTUEwVGdwYW9Bc2dXX2dXZkJJOFNuUXEwQW9FRWxnTVZINUtDMEF5elI4bVVCdDhhdmxLQ0F0NWluUy1KSjQtNUJBPT0%3D","coldHashData":"CJPhhaYGEhM0OTUzOTkxMTAyODE4MjI5NTY3GJPhhaYGMjJBTndHYjhXTXdMNXltMnVLSE9md0VYVmpwUHhvSXoxVHFyWXI0WjZ0N0pUZFNCMUVLdzoyQU53R2I4V013TDV5bTJ1S0hPZndFWFZqcFB4b0l6MVRxcllyNFo2dDdKVGRTQjFFS3dCSENBTVNNQTBUZ3Bhb0FzZ1dfZ1dmQkk4U25RcTBBb0VFbGdNVkg1S0MwQXl6UjhtVUJ0OGF2bEtDQXQ1aW5TLUpKNC01QkE9PQ%3D%3D","hotHashData":"CJPhhaYGEhQxMjc1MzUxNTg3MDYwNDg5NzEwMRiT4YWmBiiU5PwSKNuT_RIoxrL9EiiqtP0SKJ6R_hIomq3-EiiUzf4SKN3O_hIo487-EiiF2f4SKJfZ_hIondv-EijI3P4SKNjd_hIovt7-EjIyQU53R2I4V013TDV5bTJ1S0hPZndFWFZqcFB4b0l6MVRxcllyNFo2dDdKVGRTQjFFS3c6MkFOd0diOFdNd0w1eW0ydUtIT2Z3RVhWanBQeG9JejFUcXJZcjRaNnQ3SlRkU0IxRUt3QihDQU1TR1EwUDJJXzVGY29BcURrVkNvM2l6UXlMN2dIRmtBRGgwQUk9"},
"browserName":"Firefox",
"browserVersion":"115.0",
"acceptHeader":"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
"deviceExperimentId":"ChxOekkyTURJd056ZzBPRFF3TWpVME5EVTRNUT09EJPhhaYGGJLhhaYG",
"screenWidthPoints":923,
"screenHeightPoints":964,
"screenPixelDensity":1,
"screenDensityFloat":1,
"utcOffsetMinutes":120,
"userInterfaceTheme":"USER_INTERFACE_THEME_DARK",
"timeZone":"Atlantic/Jan_Mayen",
"musicAppInfo":{"pwaInstallabilityStatus":"PWA_INSTALLABILITY_STATUS_UNKNOWN","webDisplayMode":"WEB_DISPLAY_MODE_BROWSER","storeDigitalGoodsApiSupportStatus":{"playStoreDigitalGoodsApiSupportStatus":"DIGITAL_GOODS_API_SUPPORT_STATUS_UNSUPPORTED"}}
},
"user":{"lockedSafetyMode":false},
"request":{"useSsl":true,"internalExperimentFlags":[],"consistencyTokenJars":[]
},
"adSignalsInfo":{
"params":[
{"key":"dt","value":"1690398867909"},
{"key":"flash","value":"0"},
{"key":"frm","value":"0"},
{"key":"u_tz","value":"120"},{"key":"u_his","value":"5"},{"key":"u_h","value":"1080"},{"key":"u_w","value":"1920"},{"key":"u_ah","value":"1049"},{"key":"u_aw","value":"1866"},{"key":"u_cd","value":"24"},{"key":"bc","value":"31"},{"key":"bih","value":"964"},{"key":"biw","value":"923"},{"key":"brdim","value":"1280,31,1280,31,1866,31,1866,1049,923,964"},{"key":"vis","value":"1"},{"key":"wgl","value":"true"},{"key":"ca_type","value":"image"}
]
}
},
"query":"psychonaut 4",
"suggestStats":{
"validationStatus":"VALID",
"parameterValidationStatus":"VALID_PARAMETERS",
"clientName":"youtube-music",
"searchMethod":"ENTER_KEY",
"inputMethod":"KEYBOARD",
"originalQuery":"psychonaut 4",
"availableSuggestions":[{"index":0,"suggestionType":0},{"index":1,"suggestionType":0},{"index":2,"suggestionType":0},{"index":3,"suggestionType":0},{"index":4,"suggestionType":0},{"index":5,"suggestionType":0},{"index":6,"suggestionType":0}],
"zeroPrefixEnabled":true,
"firstEditTimeMsec":1329258,
"lastEditTimeMsec":1330993
}
}
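The comments in the captured request above suggest that much of the payload is optional: `itct` can be omitted, the `configInfo` blobs can be replaced with empty strings, and the `key` can be scraped from `index.html`. A minimal request might therefore look like the hypothetical sketch below; it is not part of this commit, the key value is simply copied from the capture, and the exact set of required fields is an assumption.

```python
# Hypothetical minimal search request, based on the captured payload above; not part of this commit.
import requests

API_KEY = "AIzaSyC9XL3ZjWddXya6X74dJoCTL-WEYFDNX30"  # taken from the capture; normally scraped from index.html


def search(query: str) -> dict:
    payload = {
        "context": {
            "client": {
                "hl": "en",
                "gl": "DE",
                "clientName": "WEB_REMIX",
                "clientVersion": "1.20230710.01.00",
                # per the comment in the capture, these blobs can be replaced with empty strings
                "configInfo": {"appInstallData": "", "coldConfigData": "", "coldHashData": "", "hotHashData": ""},
            }
        },
        "query": query,
    }
    r = requests.post(
        "https://music.youtube.com/youtubei/v1/search",
        params={"key": API_KEY, "prettyPrint": "false"},
        json=payload,
        headers={"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/115.0"},
        timeout=10,
    )
    r.raise_for_status()
    return r.json()
```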
8040 documentation/html/youtube-music/search/general-result.json (new file)
File diff suppressed because it is too large.
59 documentation/html/youtube-music/search/search.md (new file)
@@ -0,0 +1,59 @@
# Search

## Files

what is it | query | file
---|---|---
general search response | `psychonaut 4` | [general-result.json](general-result.json)

## A general search yields

- **Top Result**
  - the top artist
  - the most popular songs of said artist
- **Songs** (3), excluding the top songs
- Videos (3)
- **Albums** (3)
- Community playlists (3)
- **Artists** (3), excluding the top artist
  - if you search for an artist, it might return artists similar in style, not in name

### Different Renderers

#### `runs`

Pretty consistently all over the response, this should be parseable into a list of music elements.

`runs` is usually a list. If an element of the list has the key `navigationEndpoint`, it represents a music element in the following manner (see the sketch after this file):

- `text` is the name
- `navigationEndpoint` -> `browseEndpoint`
  - `browseId` is the id of the artist/song/album...
  - `browseEndpointContextSupportedConfigs` -> `browseEndpointContextMusicConfig` -> `pageType` is the type of the header-like element

#### musicCardShelfRenderer

Used by e.g. the `Top Results`.

Contains:

- one main element (a header-like music object), consisting of these keys:
  - `thumbnail` is the image of the header
  - `title` -> `runs`
    - for details look [here](#runs)

### Details

You can get the contents (a list of [renderers](#musiccardshelfrenderer)) this way:

```python
data = r.json().get("contents", {}).get("tabbedSearchResultsRenderer", {}).get("tabs", [{}])[0].get("tabRenderer").get("content", {}).get("sectionListRenderer", {}).get("contents", [])
```

Then the list contains the following items, in the following order:

1. _About these results_ (an info button)
2. The **Top result**
3. The **Songs** [_musicShelfRenderer_]
4. ...
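Based on the `runs` description in this file, a parser could look roughly like the following sketch. It is hypothetical and not part of this commit; the helper name and the returned structure are made up for illustration.

```python
# Hypothetical helper that walks a "runs" list as described above; not part of the repository.
from typing import List, Optional, TypedDict


class MusicElement(TypedDict):
    name: str
    browse_id: Optional[str]
    page_type: Optional[str]


def parse_runs(runs: List[dict]) -> List[MusicElement]:
    elements: List[MusicElement] = []
    for run in runs:
        if "navigationEndpoint" not in run:
            continue  # text-only runs carry no music element
        browse = run["navigationEndpoint"].get("browseEndpoint", {})
        configs = browse.get("browseEndpointContextSupportedConfigs", {})
        music_config = configs.get("browseEndpointContextMusicConfig", {})
        elements.append(MusicElement(
            name=run.get("text", ""),
            browse_id=browse.get("browseId"),
            page_type=music_config.get("pageType"),
        ))
    return elements
```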
@@ -3,7 +3,7 @@ import music_kraken
 
 if __name__ == "__main__":
     normally_download = [
-        "s: #a Ghost Bath",
+        "s: #a Favorite #r Anarcho",
         "1",
         "d: 1, 5"
     ]
@@ -28,4 +28,9 @@ if __name__ == "__main__":
         "d: 5"
     ]
 
-    music_kraken.cli.download(genre="test", command_list=download_youtube_playlist, process_metadata_anyway=True)
+    youtube_music_test = [
+        "s: #a Favorite #r Anarcho",
+        "0"
+    ]
+
+    music_kraken.cli.download(genre="test", command_list=youtube_music_test, process_metadata_anyway=True)
@@ -3,11 +3,23 @@ import logging
 import gc
 import musicbrainzngs
 
-from .utils.config import read_config
-from .utils.shared import MODIFY_GC
+from .utils.shared import DEBUG
+from .utils.config import logging_settings, main_settings, read_config
+
+read_config()
 from . import cli
 
-if MODIFY_GC:
+# configure logger default
+logging.basicConfig(
+    level=logging_settings['log_level'] if not DEBUG else logging.DEBUG,
+    format=logging_settings['logging_format'],
+    handlers=[
+        logging.FileHandler(main_settings['log_file']),
+        logging.StreamHandler()
+    ]
+)
+
+if main_settings['modify_gc']:
     """
     At the start I modify the garbage collector to run a bit fewer times.
     This should increase speed:
@@ -21,6 +33,3 @@ if MODIFY_GC:
     gen1 = gen1 * 2
     gen2 = gen2 * 2
     gc.set_threshold(allocs, gen1, gen2)
-
-logging.getLogger("musicbrainzngs").setLevel(logging.WARNING)
-musicbrainzngs.set_useragent("metadata receiver", "0.1", "https://github.com/HeIIow2/music-downloader")
@@ -2,11 +2,14 @@ from typing import List, Tuple
 from tqdm import tqdm
 from ffmpeg_progress_yield import FfmpegProgress
 
-from ..utils.shared import BITRATE, AUDIO_FORMAT, CODEX_LOGGER as LOGGER, FFMPEG_BINARY
+from ..utils.config import main_settings, logging_settings
 from ..objects import Target
 
 
-def correct_codec(target: Target, bitrate_kb: int = BITRATE, audio_format: str = AUDIO_FORMAT, interval_list: List[Tuple[float, float]] = None):
+LOGGER = logging_settings["codex_logger"]
+
+
+def correct_codec(target: Target, bitrate_kb: int = main_settings["bitrate"], audio_format: str = main_settings["audio_format"], interval_list: List[Tuple[float, float]] = None):
     if not target.exists:
         LOGGER.warning(f"Target doesn't exist: {target.file_path}")
         return
@@ -35,7 +38,7 @@ def correct_codec(target: Target, bitrate_kb: int = BITRATE, audio_format: str =
 
     # build the ffmpeg command
     ffmpeg_command = [
-        str(FFMPEG_BINARY),
+        str(main_settings["ffmpeg_binary"]),
         "-i", str(target.file_path),
         "-af", select,
         "-b", str(bitrate_b),
@@ -4,12 +4,13 @@ from pathlib import Path
 from typing import List
 import logging
 
-from ..utils.shared import (
-    TAGGING_LOGGER as LOGGER
-)
+from ..utils.config import logging_settings
 from ..objects import Song, Target, Metadata
 
 
+LOGGER = logging_settings["tagging_logger"]
+
+
 class AudioMetadata:
     def __init__(self, file_location: str = None) -> None:
         self._file_location = None
@@ -5,8 +5,7 @@ import re
 from .utils import cli_function
 from .options.first_config import initial_config
 
-from ..utils.config import set_name_to_value, write_config
-from ..utils.shared import MUSIC_DIR, NOT_A_GENRE_REGEX, ENABLE_RESULT_HISTORY, HISTORY_LENGTH, HELP_MESSAGE, HASNT_YET_STARTED
+from ..utils.config import write_config, main_settings
 from ..utils.regex import URL_PATTERN
 from ..utils.string_processing import fit_to_file_system
 from ..utils.support_classes import Query, DownloadResult
@@ -95,12 +94,12 @@ def get_existing_genre() -> List[str]:
     existing_genres: List[str] = []
 
     # get all subdirectories of MUSIC_DIR, not the files in the dir.
-    existing_subdirectories: List[Path] = [f for f in MUSIC_DIR.iterdir() if f.is_dir()]
+    existing_subdirectories: List[Path] = [f for f in main_settings["music_directory"].iterdir() if f.is_dir()]
 
     for subdirectory in existing_subdirectories:
         name: str = subdirectory.name
 
-        if not any(re.match(regex_pattern, name) for regex_pattern in NOT_A_GENRE_REGEX):
+        if not any(re.match(regex_pattern, name) for regex_pattern in main_settings["not_a_genre_regex"]):
             existing_genres.append(name)
 
     existing_genres.sort()
@@ -133,7 +132,7 @@ def get_genre():
 
 def help_message():
     print()
-    print(HELP_MESSAGE)
+    print(main_settings["happy_messages"])
     print()
 
 
@@ -187,18 +186,18 @@ class Downloader:
         print()
 
     def set_current_options(self, current_options: Results):
-        if ENABLE_RESULT_HISTORY:
+        if main_settings["result_history"]:
             self._result_history.append(current_options)
 
-            if HISTORY_LENGTH != -1:
-                if len(self._result_history) > HISTORY_LENGTH:
+            if main_settings["history_length"] != -1:
+                if len(self._result_history) > main_settings["history_length"]:
                     self._result_history.pop(0)
 
         self.current_results = current_options
 
     def previous_option(self) -> bool:
-        if not ENABLE_RESULT_HISTORY:
-            print("History is turned of.\nGo to settings, and change the value at 'result_history' to 'true'.")
+        if not main_settings["result_history"]:
+            print("History is turned of.\nGo to main_settings, and change the value at 'result_history' to 'true'.")
             return False
 
         if len(self._result_history) <= 1:
@@ -214,12 +213,15 @@ class Downloader:
         artist = None if not "a" in key_text else Artist(name=key_text["a"], dynamic=True)
 
         if song is not None:
-            song.album_collection.append(album)
-            song.main_artist_collection.append(artist)
+            if album is not None:
+                song.album_collection.append(album)
+            if artist is not None:
+                song.main_artist_collection.append(artist)
             return Query(raw_query=query, music_object=song)
 
         if album is not None:
-            album.artist_collection.append(artist)
+            if artist is not None:
+                album.artist_collection.append(artist)
             return Query(raw_query=query, music_object=album)
 
         if artist is not None:
@@ -393,10 +395,10 @@ def download(
     command_list: List[str] = None,
     process_metadata_anyway: bool = False,
 ):
-    if HASNT_YET_STARTED:
+    if main_settings["hasnt_yet_started"]:
         code = initial_config()
         if code == 0:
-            set_name_to_value("hasnt_yet_started", "false")
+            main_settings["hasnt_yet_started"] = False
             write_config()
             print("Restart the programm to use it.")
             return code
@@ -3,11 +3,12 @@ from typing import List, Dict, Callable, Optional, Set
 from urllib.parse import urlparse, urlunsplit, ParseResult
 import logging
 
+import threading
 import requests
 from tqdm import tqdm
 
 from .rotating import RotatingProxy
-from ..utils.shared import PROXIES_LIST, CHUNK_SIZE
+from ..utils.config import main_settings
 from ..utils.support_classes import DownloadResult
 from ..objects import Target
 
@@ -17,16 +18,17 @@ class Connection:
         self,
         host: str,
         proxies: List[dict] = None,
-        tries: int = (len(PROXIES_LIST) + 1) * 4,
+        tries: int = (len(main_settings["proxies"]) + 1) * 4,
         timeout: int = 7,
         logger: logging.Logger = logging.getLogger("connection"),
         header_values: Dict[str, str] = None,
         accepted_response_codes: Set[int] = None,
         semantic_not_found: bool = True,
-        sleep_after_404: float = 0.0
+        sleep_after_404: float = 0.0,
+        heartbeat_interval = 0,
     ):
         if proxies is None:
-            proxies = PROXIES_LIST
+            proxies = main_settings["proxies"]
         if header_values is None:
             header_values = dict()
 
@@ -46,6 +48,45 @@ class Connection:
         self.session.headers = self.get_header(**self.HEADER_VALUES)
         self.session.proxies = self.rotating_proxy.current_proxy
 
+        self.session_is_occupied: bool = False
+
+        self.heartbeat_thread = None
+        self.heartbeat_interval = heartbeat_interval
+
+    @property
+    def user_agent(self) -> str:
+        return self.session.headers.get("user-agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36")
+
+
+    def start_heartbeat(self):
+        if self.heartbeat_interval <= 0:
+            self.LOGGER.warning(f"Can't start a heartbeat with {self.heartbeat_interval}s in between.")
+
+        self.heartbeat_thread = threading.Thread(target=self._heartbeat_loop, args=(self.heartbeat_interval, ), daemon=True)
+        self.heartbeat_thread.start()
+
+    def heartbeat_failed(self):
+        self.LOGGER.warning(f"I just died... (The heartbeat failed)")
+
+
+    def heartbeat(self):
+        # Your code to send heartbeat requests goes here
+        print("the hearth is beating, but it needs to be implemented ;-;\nFuck youuuu for setting heartbeat in the constructor to true, but not implementing the method Connection.hearbeat()")
+
+    def _heartbeat_loop(self, interval: float):
+        def heartbeat_wrapper():
+            self.session_is_occupied = True
+            self.LOGGER.debug(f"I am living. (sending a heartbeat)")
+            self.heartbeat()
+            self.LOGGER.debug(f"finished the heartbeat")
+            self.session_is_occupied = False
+
+        while True:
+            heartbeat_wrapper()
+            time.sleep(interval)
+
+
+
     def base_url(self, url: ParseResult = None):
         if url is None:
             url = self.HOST
@@ -89,6 +130,7 @@ class Connection:
         refer_from_origin: bool = True,
         raw_url: bool = False,
         sleep_after_404: float = None,
+        is_heartbeat: bool = False,
         **kwargs
     ) -> Optional[requests.Response]:
         if sleep_after_404 is None:
@@ -111,6 +153,11 @@ class Connection:
 
         connection_failed = False
         try:
+            if self.session_is_occupied and not is_heartbeat:
+                self.LOGGER.info(f"Waiting for the heartbeat to finish.")
+                while self.session_is_occupied and not is_heartbeat:
+                    pass
+
             r: requests.Response = request(request_url, timeout=timeout, headers=headers, **kwargs)
 
             if r.status_code in accepted_response_codes:
@@ -137,6 +184,9 @@ class Connection:
 
         self.rotate()
 
+        if self.heartbeat_interval > 0 and self.heartbeat_thread is None:
+            self.start_heartbeat()
+
         return self._request(
             request=request,
             try_count=try_count+1,
@@ -145,6 +195,7 @@ class Connection:
             timeout=timeout,
             headers=headers,
             sleep_after_404=sleep_after_404,
+            is_heartbeat=is_heartbeat,
             **kwargs
         )
 
@@ -181,7 +232,7 @@ class Connection:
     def post(
         self,
         url: str,
-        json: dict,
+        json: dict = None,
         refer_from_origin: bool = True,
         stream: bool = False,
         accepted_response_codes: set = None,
@@ -218,7 +269,7 @@ class Connection:
         timeout: float = None,
         headers: dict = None,
         raw_url: bool = False,
-        chunk_size: int = CHUNK_SIZE,
+        chunk_size: int = main_settings["chunk_size"],
         try_count: int = 0,
         progress: int = 0,
         **kwargs
@@ -1,27 +1,40 @@
-from typing import Tuple, Type, Dict, List, Set
+from typing import Tuple, Type, Dict, Set
 
 from .results import SearchResults
 from ..objects import DatabaseObject, Source
 
 from ..utils.enums.source import SourcePages
 from ..utils.support_classes import Query, DownloadResult
 from ..utils.exception.download import UrlNotFoundException
-from ..pages import Page, EncyclopaediaMetallum, Musify, YouTube, INDEPENDENT_DB_OBJECTS
+from ..utils.shared import DEBUG_PAGES
 
+from ..pages import Page, EncyclopaediaMetallum, Musify, YouTube, YoutubeMusic, INDEPENDENT_DB_OBJECTS
+
+if DEBUG_PAGES:
+    DEBUGGING_PAGE = YoutubeMusic
+
+
 ALL_PAGES: Set[Type[Page]] = {
     EncyclopaediaMetallum,
     Musify,
     YouTube,
+    YoutubeMusic
 }
 
 AUDIO_PAGES: Set[Type[Page]] = {
     Musify,
     YouTube,
+    YoutubeMusic
 }
 
 SHADY_PAGES: Set[Type[Page]] = {
     Musify,
 }
 
+if DEBUGGING_PAGE is not None:
+    print(f"The DEBUGGING_PAGE is not None, but {DEBUGGING_PAGE}. Only using this page")
+    ALL_PAGES = {DEBUGGING_PAGE}
+    AUDIO_PAGES = ALL_PAGES.union(AUDIO_PAGES)
+
 
 class Pages:
@@ -67,6 +80,9 @@ class Pages:
             return music_object
 
         for source_page in music_object.source_collection.source_pages:
+            if source_page not in self._source_to_page:
+                continue
+
             page_type = self._source_to_page[source_page]
 
             if page_type in self._pages_set:
@@ -1,10 +1,13 @@
-from typing import List, Iterable, Dict
+from typing import List, Iterable, Dict, TypeVar, Generic, Iterator
 from collections import defaultdict
 from dataclasses import dataclass
 
 from .parents import DatabaseObject
 
 
+T = TypeVar('T', bound=DatabaseObject)
+
+
 @dataclass
 class AppendResult:
     was_in_collection: bool
@@ -12,21 +15,21 @@ class AppendResult:
     was_the_same: bool
 
 
-class Collection:
+class Collection(Generic[T]):
     """
     This a class for the iterables
     like tracklist or discography
     """
-    _data: List[DatabaseObject]
+    _data: List[T]
 
     _by_url: dict
     _by_attribute: dict
 
-    def __init__(self, data: List[DatabaseObject] = None, element_type=None, *args, **kwargs) -> None:
+    def __init__(self, data: List[T] = None, element_type=None, *args, **kwargs) -> None:
         # Attribute needs to point to
         self.element_type = element_type
 
-        self._data: List[DatabaseObject] = list()
+        self._data: List[T] = list()
 
         """
         example of attribute_to_object_map
@@ -40,7 +43,7 @@ class Collection:
         }
         ```
         """
-        self._attribute_to_object_map: Dict[str, Dict[object, DatabaseObject]] = defaultdict(dict)
+        self._attribute_to_object_map: Dict[str, Dict[object, T]] = defaultdict(dict)
         self._used_ids: set = set()
 
         if data is not None:
@@ -49,7 +52,7 @@ class Collection:
     def sort(self, reverse: bool = False, **kwargs):
         self._data.sort(reverse=reverse, **kwargs)
 
-    def map_element(self, element: DatabaseObject):
+    def map_element(self, element: T):
         for name, value in element.indexing_values:
             if value is None:
                 continue
@@ -58,7 +61,7 @@ class Collection:
 
         self._used_ids.add(element.id)
 
-    def unmap_element(self, element: DatabaseObject):
+    def unmap_element(self, element: T):
         for name, value in element.indexing_values:
             if value is None:
                 continue
@@ -70,7 +73,7 @@ class Collection:
             except KeyError:
                 pass
 
-    def append(self, element: DatabaseObject, merge_on_conflict: bool = True,
+    def append(self, element: T, merge_on_conflict: bool = True,
                merge_into_existing: bool = True) -> AppendResult:
         """
         :param element:
@@ -117,12 +120,12 @@ class Collection:
 
         return AppendResult(False, element, False)
 
-    def extend(self, element_list: Iterable[DatabaseObject], merge_on_conflict: bool = True,
+    def extend(self, element_list: Iterable[T], merge_on_conflict: bool = True,
               merge_into_existing: bool = True):
         for element in element_list:
             self.append(element, merge_on_conflict=merge_on_conflict, merge_into_existing=merge_into_existing)
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[T]:
         for element in self.shallow_list:
             yield element
 
@@ -132,13 +135,13 @@ class Collection:
     def __len__(self) -> int:
         return len(self._data)
 
-    def __getitem__(self, key):
+    def __getitem__(self, key) -> T:
         if type(key) != int:
             return ValueError("key needs to be an integer")
 
         return self._data[key]
 
-    def __setitem__(self, key, value: DatabaseObject):
+    def __setitem__(self, key, value: T):
         if type(key) != int:
             return ValueError("key needs to be an integer")
 
@@ -149,7 +152,7 @@ class Collection:
         self._data[key] = value
 
     @property
-    def shallow_list(self) -> List[DatabaseObject]:
+    def shallow_list(self) -> List[T]:
         """
         returns a shallow copy of the data list
         """
@@ -1,10 +1,14 @@
 import random
 from collections import defaultdict
-from typing import Optional, Dict, Tuple, List
+from typing import Optional, Dict, Tuple, List, Type
 
 from .metadata import Metadata
 from .option import Options
-from ..utils.shared import ID_RANGE, OBJECT_LOGGER as LOGGER
+from ..utils.shared import HIGHEST_ID
+from ..utils.config import main_settings, logging_settings
 
 
+LOGGER = logging_settings["object_logger"]
+
+
 class DatabaseObject:
@@ -25,7 +29,7 @@ class DatabaseObject:
             64 bit integer, but this is defined in shared.py in ID_BITS
             the range is defined in the Tuple ID_RANGE
             """
-            _id = random.randint(*ID_RANGE)
+            _id = random.randint(0, HIGHEST_ID)
             self.automatic_id = True
             LOGGER.debug(f"Id for {type(self).__name__} isn't set. Setting to {_id}")
 
@@ -125,6 +129,18 @@ class DatabaseObject:
 
         self._build_recursive_structures(build_version=random.randint(0, 99999), merge=merge_into)
 
+    def _add_other_db_objects(self, object_type: Type["DatabaseObject"], object_list: List["DatabaseObject"]):
+        pass
+
+    def add_list_of_other_objects(self, object_list: List["DatabaseObject"]):
+        d: Dict[Type[DatabaseObject], List[DatabaseObject]] = defaultdict(list)
+
+        for db_object in object_list:
+            d[type(db_object)].append(db_object)
+
+        for key, value in d.items():
+            self._add_other_db_objects(key, value)
+
 
 class MainObject(DatabaseObject):
     """
@@ -1,6 +1,6 @@
 import random
 from collections import defaultdict
-from typing import List, Optional, Dict, Tuple
+from typing import List, Optional, Dict, Tuple, Type
 
 import pycountry
 
@@ -18,7 +18,8 @@ from .parents import MainObject, DatabaseObject
 from .source import Source, SourceCollection
 from .target import Target
 from ..utils.string_processing import unify
-from ..utils.shared import SORT_BY_ALBUM_TYPE, SORT_BY_DATE
+
+from ..utils.config import main_settings
 
 """
 All Objects dependent
@@ -82,11 +83,11 @@ class Song(MainObject):
         self.notes: FormattedText = notes or FormattedText()
 
         self.source_collection: SourceCollection = SourceCollection(source_list)
-        self.target_collection: Collection = Collection(data=target_list, element_type=Target)
-        self.lyrics_collection: Collection = Collection(data=lyrics_list, element_type=Lyrics)
-        self.album_collection: Collection = Collection(data=album_list, element_type=Album)
-        self.main_artist_collection = Collection(data=main_artist_list, element_type=Artist)
-        self.feature_artist_collection = Collection(data=feature_artist_list, element_type=Artist)
+        self.target_collection: Collection[Target] = Collection(data=target_list, element_type=Target)
+        self.lyrics_collection: Collection[Lyrics] = Collection(data=lyrics_list, element_type=Lyrics)
+        self.album_collection: Collection[Album] = Collection(data=album_list, element_type=Album)
+        self.main_artist_collection: Collection[Artist] = Collection(data=main_artist_list, element_type=Artist)
+        self.feature_artist_collection: Collection[Artist] = Collection(data=feature_artist_list, element_type=Artist)
 
     def _build_recursive_structures(self, build_version: int, merge: bool):
         if build_version == self.build_version:
@@ -108,6 +109,23 @@ class Song(MainObject):
                 artist.main_album_collection.append(album, merge_on_conflict=merge, merge_into_existing=False)
                 artist._build_recursive_structures(build_version=build_version, merge=merge)
 
+    def _add_other_db_objects(self, object_type: Type["DatabaseObject"], object_list: List["DatabaseObject"]):
+        if object_type is Song:
+            return
+
+        if object_type is Lyrics:
+            self.lyrics_collection.extend(object_list)
+            return
+
+        if object_type is Artist:
+            self.main_artist_collection.extend(object_list)
+            return
+
+        if object_type is Album:
+            self.album_collection.extend(object_list)
+            return
+
+
     @property
     def indexing_values(self) -> List[Tuple[str, object]]:
         return [
@@ -255,9 +273,9 @@ class Album(MainObject):
         self.notes = notes or FormattedText()
 
         self.source_collection: SourceCollection = SourceCollection(source_list)
-        self.song_collection: Collection = Collection(data=song_list, element_type=Song)
-        self.artist_collection: Collection = Collection(data=artist_list, element_type=Artist)
-        self.label_collection: Collection = Collection(data=label_list, element_type=Label)
+        self.song_collection: Collection[Song] = Collection(data=song_list, element_type=Song)
+        self.artist_collection: Collection[Artist] = Collection(data=artist_list, element_type=Artist)
+        self.label_collection: Collection[Label] = Collection(data=label_list, element_type=Label)
 
     def _build_recursive_structures(self, build_version: int, merge: bool):
         if build_version == self.build_version:
@@ -279,6 +297,22 @@ class Album(MainObject):
                 label.album_collection.append(self, merge_on_conflict=merge, merge_into_existing=False)
                 label._build_recursive_structures(build_version=build_version, merge=merge)
 
+    def _add_other_db_objects(self, object_type: Type["DatabaseObject"], object_list: List["DatabaseObject"]):
+        if object_type is Song:
+            self.song_collection.extend(object_list)
+            return
+
+        if object_type is Artist:
+            self.artist_collection.extend(object_list)
+            return
+
+        if object_type is Album:
+            return
+
+        if object_type is Label:
+            self.label_collection.extend(object_list)
+            return
+
     @property
     def indexing_values(self) -> List[Tuple[str, object]]:
         return [
@@ -473,17 +507,31 @@ class Artist(MainObject):
         i mean do as you want but there is no strict rule about em so good luck
         """
         self.notes: FormattedText = notes or FormattedText()
-        """
-        TODO
-        implement in db
-        """
         self.lyrical_themes: List[str] = lyrical_themes or []
         self.general_genre = general_genre
 
         self.source_collection: SourceCollection = SourceCollection(source_list)
-        self.feature_song_collection: Collection = Collection(data=feature_song_list, element_type=Song)
-        self.main_album_collection: Collection = Collection(data=main_album_list, element_type=Album)
-        self.label_collection: Collection = Collection(data=label_list, element_type=Label)
+        self.feature_song_collection: Collection[Song] = Collection(data=feature_song_list, element_type=Song)
+        self.main_album_collection: Collection[Album] = Collection(data=main_album_list, element_type=Album)
+        self.label_collection: Collection[Label] = Collection(data=label_list, element_type=Label)
+
+    def _add_other_db_objects(self, object_type: Type["DatabaseObject"], object_list: List["DatabaseObject"]):
+        if object_type is Song:
+            # this doesn't really make sense
+            # self.feature_song_collection.extend(object_list)
+            return
+
+        if object_type is Artist:
+            return
+
+        if object_type is Album:
+            self.main_album_collection.extend(object_list)
+            return
+
+        if object_type is Label:
+            self.label_collection.extend(object_list)
+            return
 
     def compile(self, merge_into: bool = False):
         """
@@ -515,7 +563,7 @@ class Artist(MainObject):
             AlbumType.STUDIO_ALBUM: 0,
             AlbumType.EP: 0,
             AlbumType.SINGLE: 1
-        }) if SORT_BY_ALBUM_TYPE else defaultdict(lambda: 0)
+        }) if main_settings["sort_album_by_type"] else defaultdict(lambda: 0)
 
         sections = defaultdict(list)
 
@@ -528,7 +576,7 @@ class Artist(MainObject):
             # album is just a value used in loops
            nonlocal album
 
-            if SORT_BY_DATE:
+            if main_settings["sort_by_date"]:
                _section.sort(key=lambda _album: _album.date, reverse=True)
 
            new_last_albumsort = last_albumsort
@@ -685,8 +733,20 @@ class Label(MainObject):
         self.notes = notes or FormattedText()
 
         self.source_collection: SourceCollection = SourceCollection(source_list)
-        self.album_collection: Collection = Collection(data=album_list, element_type=Album)
-        self.current_artist_collection: Collection = Collection(data=current_artist_list, element_type=Artist)
+        self.album_collection: Collection[Album] = Collection(data=album_list, element_type=Album)
+        self.current_artist_collection: Collection[Artist] = Collection(data=current_artist_list, element_type=Artist)
+
+    def _add_other_db_objects(self, object_type: Type["DatabaseObject"], object_list: List["DatabaseObject"]):
+        if object_type is Song:
+            return
+
+        if object_type is Artist:
+            self.current_artist_collection.extend(object_list)
+            return
+
+        if object_type is Album:
+            self.album_collection.extend(object_list)
+            return
 
     def _build_recursive_structures(self, build_version: int, merge: False):
         if build_version == self.build_version:
@@ -4,7 +4,8 @@ from typing import List, Dict, Set, Tuple, Optional
 from urllib.parse import urlparse
 
 from ..utils.enums.source import SourcePages, SourceTypes
-from ..utils.shared import ALL_YOUTUBE_URLS
+from ..utils.config import youtube_settings
+
 from .metadata import Mapping, Metadata
 from .parents import DatabaseObject
 from .collection import Collection
@@ -54,7 +55,7 @@ class Source(DatabaseObject):
         if "musify" in parsed.netloc:
             return cls(SourcePages.MUSIFY, url, referer_page=referer_page)
 
-        if parsed.netloc in [_url.netloc for _url in ALL_YOUTUBE_URLS]:
+        if parsed.netloc in [_url.netloc for _url in youtube_settings['youtube_url']]:
             return cls(SourcePages.YOUTUBE, url, referer_page=referer_page)
 
         if url.startswith("https://www.deezer"):
@@ -6,7 +6,7 @@ import requests
 from tqdm import tqdm
 
 from .parents import DatabaseObject
-from ..utils import shared
+from ..utils.config import main_settings, logging_settings
 
 
 LOGGER = logging.getLogger("target")
@@ -36,7 +36,7 @@ class Target(DatabaseObject):
     ) -> None:
         super().__init__(dynamic=dynamic)
         self._file: Path = Path(file)
-        self._path: Path = Path(shared.MUSIC_DIR, path) if relative_to_music_dir else Path(path)
+        self._path: Path = Path(main_settings["music_directory"], path) if relative_to_music_dir else Path(path)
 
         self.is_relative_to_music_dir: bool = relative_to_music_dir
 
@@ -95,13 +95,13 @@ class Target(DatabaseObject):
         """
         with tqdm(total=total_size, unit='B', unit_scale=True, unit_divisor=1024, desc=desc) as t:
 
-            for chunk in r.iter_content(chunk_size=shared.CHUNK_SIZE):
+            for chunk in r.iter_content(chunk_size=main_settings["chunk_size"]):
                 size = f.write(chunk)
                 t.update(size)
             return True
 
         except requests.exceptions.Timeout:
-            shared.DOWNLOAD_LOGGER.error("Stream timed out.")
+            logging_settings["download_logger"].error("Stream timed out.")
             return False
 
     def open(self, file_mode: str, **kwargs) -> TextIO:
@ -1,5 +1,6 @@
|
|||||||
from .encyclopaedia_metallum import EncyclopaediaMetallum
|
from .encyclopaedia_metallum import EncyclopaediaMetallum
|
||||||
from .musify import Musify
|
from .musify import Musify
|
||||||
from .youtube import YouTube
|
from .youtube import YouTube
|
||||||
|
from .youtube_music import YoutubeMusic
|
||||||
|
|
||||||
from .abstract import Page, INDEPENDENT_DB_OBJECTS
|
from .abstract import Page, INDEPENDENT_DB_OBJECTS
|
||||||
|
@ -22,10 +22,10 @@ from ..objects import (
|
|||||||
from ..utils.enums.source import SourcePages
|
from ..utils.enums.source import SourcePages
|
||||||
from ..utils.enums.album import AlbumType
|
from ..utils.enums.album import AlbumType
|
||||||
from ..audio import write_metadata_to_target, correct_codec
|
from ..audio import write_metadata_to_target, correct_codec
|
||||||
from ..utils import shared
|
from ..utils.config import main_settings
|
||||||
from ..utils.shared import DOWNLOAD_PATH, DOWNLOAD_FILE, AUDIO_FORMAT
|
|
||||||
from ..utils.support_classes import Query, DownloadResult
|
from ..utils.support_classes import Query, DownloadResult
|
||||||
|
|
||||||
|
|
||||||
INDEPENDENT_DB_OBJECTS = Union[Label, Album, Artist, Song]
|
INDEPENDENT_DB_OBJECTS = Union[Label, Album, Artist, Song]
|
||||||
INDEPENDENT_DB_TYPES = Union[Type[Song], Type[Album], Type[Artist], Type[Label]]
|
INDEPENDENT_DB_TYPES = Union[Type[Song], Type[Album], Type[Artist], Type[Label]]
|
||||||
|
|
||||||
@ -44,7 +44,7 @@ class NamingDict(dict):
|
|||||||
self.object_mappings: Dict[str, DatabaseObject] = object_mappings or dict()
|
self.object_mappings: Dict[str, DatabaseObject] = object_mappings or dict()
|
||||||
|
|
||||||
super().__init__(values)
|
super().__init__(values)
|
||||||
self["audio_format"] = AUDIO_FORMAT
|
self["audio_format"] = main_settings["audio_format"]
|
||||||
|
|
||||||
def add_object(self, music_object: DatabaseObject):
|
def add_object(self, music_object: DatabaseObject):
|
||||||
self.object_mappings[type(music_object).__name__.lower()] = music_object
|
self.object_mappings[type(music_object).__name__.lower()] = music_object
|
||||||
@ -351,7 +351,7 @@ class Page:
|
|||||||
if self.NO_ADDITIONAL_DATA_FROM_SONG:
|
if self.NO_ADDITIONAL_DATA_FROM_SONG:
|
||||||
skip_next_details = True
|
skip_next_details = True
|
||||||
|
|
||||||
if not download_all and music_object.album_type in shared.ALBUM_TYPE_BLACKLIST:
|
if not download_all and music_object.album_type.value in main_settings["album_type_blacklist"]:
|
||||||
return DownloadResult()
|
return DownloadResult()
|
||||||
|
|
||||||
if not isinstance(music_object, Song) or not self.NO_ADDITIONAL_DATA_FROM_SONG:
|
if not isinstance(music_object, Song) or not self.NO_ADDITIONAL_DATA_FROM_SONG:
|
||||||
@ -380,12 +380,12 @@ class Page:
|
|||||||
if song.genre is None:
|
if song.genre is None:
|
||||||
song.genre = naming_dict["genre"]
|
song.genre = naming_dict["genre"]
|
||||||
|
|
||||||
path_parts = Formatter().parse(DOWNLOAD_PATH)
|
path_parts = Formatter().parse(main_settings["download_path"])
|
||||||
file_parts = Formatter().parse(DOWNLOAD_FILE)
|
file_parts = Formatter().parse(main_settings["download_file"])
|
||||||
new_target = Target(
|
new_target = Target(
|
||||||
relative_to_music_dir=True,
|
relative_to_music_dir=True,
|
||||||
path=DOWNLOAD_PATH.format(**{part[1]: naming_dict[part[1]] for part in path_parts}),
|
path=main_settings["download_path"].format(**{part[1]: naming_dict[part[1]] for part in path_parts}),
|
||||||
file=DOWNLOAD_FILE.format(**{part[1]: naming_dict[part[1]] for part in file_parts})
|
file=main_settings["download_file"].format(**{part[1]: naming_dict[part[1]] for part in file_parts})
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -397,7 +397,7 @@ class Page:
|
|||||||
return DownloadResult(error_message=f"No source found for {song.title} as {self.__class__.__name__}.")
|
return DownloadResult(error_message=f"No source found for {song.title} as {self.__class__.__name__}.")
|
||||||
|
|
||||||
temp_target: Target = Target(
|
temp_target: Target = Target(
|
||||||
path=shared.TEMP_DIR,
|
path=main_settings["temp_directory"],
|
||||||
file=str(random.randint(0, 999999))
|
file=str(random.randint(0, 999999))
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -5,7 +5,7 @@ import pycountry
|
|||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
from ..connection import Connection
|
from ..connection import Connection
|
||||||
from ..utils.shared import ENCYCLOPAEDIA_METALLUM_LOGGER
|
from ..utils.config import logging_settings
|
||||||
from .abstract import Page
|
from .abstract import Page
|
||||||
from ..utils.enums.source import SourcePages
|
from ..utils.enums.source import SourcePages
|
||||||
from ..utils.enums.album import AlbumType
|
from ..utils.enums.album import AlbumType
|
||||||
@ -108,12 +108,12 @@ def _album_from_json(album_html=None, release_type=None, artist_html=None) -> Al
|
|||||||
|
|
||||||
class EncyclopaediaMetallum(Page):
|
class EncyclopaediaMetallum(Page):
|
||||||
SOURCE_TYPE = SourcePages.ENCYCLOPAEDIA_METALLUM
|
SOURCE_TYPE = SourcePages.ENCYCLOPAEDIA_METALLUM
|
||||||
LOGGER = ENCYCLOPAEDIA_METALLUM_LOGGER
|
LOGGER = logging_settings["metal_archives_logger"]
|
||||||
|
|
||||||
def __init__(self, **kwargs):
|
def __init__(self, **kwargs):
|
||||||
self.connection: Connection = Connection(
|
self.connection: Connection = Connection(
|
||||||
host="https://www.metal-archives.com/",
|
host="https://www.metal-archives.com/",
|
||||||
logger=ENCYCLOPAEDIA_METALLUM_LOGGER
|
logger=self.LOGGER
|
||||||
)
|
)
|
||||||
|
|
||||||
super().__init__(**kwargs)
|
super().__init__(**kwargs)
|
||||||
|
@ -23,7 +23,7 @@ from ..objects import (
|
|||||||
DatabaseObject,
|
DatabaseObject,
|
||||||
Lyrics
|
Lyrics
|
||||||
)
|
)
|
||||||
from ..utils.shared import MUSIFY_LOGGER
|
from ..utils.config import logging_settings
|
||||||
from ..utils import string_processing, shared
|
from ..utils import string_processing, shared
|
||||||
from ..utils.support_classes import DownloadResult, Query
|
from ..utils.support_classes import DownloadResult, Query
|
||||||
|
|
||||||
@ -95,7 +95,7 @@ def parse_url(url: str) -> MusifyUrl:
|
|||||||
try:
|
try:
|
||||||
type_enum = MusifyTypes(path[1])
|
type_enum = MusifyTypes(path[1])
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
MUSIFY_LOGGER.warning(f"{path[1]} is not yet implemented, add it to MusifyTypes")
|
logging_settings["musify_logger"].warning(f"{path[1]} is not yet implemented, add it to MusifyTypes")
|
||||||
raise e
|
raise e
|
||||||
|
|
||||||
return MusifyUrl(
|
return MusifyUrl(
|
||||||
@ -110,7 +110,7 @@ def parse_url(url: str) -> MusifyUrl:
|
|||||||
class Musify(Page):
|
class Musify(Page):
|
||||||
# CHANGE
|
# CHANGE
|
||||||
SOURCE_TYPE = SourcePages.MUSIFY
|
SOURCE_TYPE = SourcePages.MUSIFY
|
||||||
LOGGER = MUSIFY_LOGGER
|
LOGGER = logging_settings["musify_logger"]
|
||||||
|
|
||||||
HOST = "https://musify.club"
|
HOST = "https://musify.club"
|
||||||
|
|
||||||
|
@ -21,7 +21,9 @@ from ..objects import (
|
|||||||
from ..connection import Connection
|
from ..connection import Connection
|
||||||
from ..utils.string_processing import clean_song_title
|
from ..utils.string_processing import clean_song_title
|
||||||
from ..utils.support_classes import DownloadResult
|
from ..utils.support_classes import DownloadResult
|
||||||
from ..utils.shared import YOUTUBE_LOGGER, INVIDIOUS_INSTANCE, BITRATE, ENABLE_SPONSOR_BLOCK, PIPED_INSTANCE, SLEEP_AFTER_YOUTUBE_403
|
from ..utils.config import youtube_settings, main_settings, logging_settings
|
||||||
|
|
||||||
|
from .youtube_music.super_youtube import SuperYouTube, YouTubeUrl, get_invidious_url, YouTubeUrlType
|
||||||
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
@ -32,107 +34,14 @@ from ..utils.shared import YOUTUBE_LOGGER, INVIDIOUS_INSTANCE, BITRATE, ENABLE_S
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
def get_invidious_url(path: str = "", params: str = "", query: str = "", fragment: str = "") -> str:
|
|
||||||
return urlunparse((INVIDIOUS_INSTANCE.scheme, INVIDIOUS_INSTANCE.netloc, path, params, query, fragment))
|
|
||||||
|
|
||||||
def get_piped_url(path: str = "", params: str = "", query: str = "", fragment: str = "") -> str:
|
def get_piped_url(path: str = "", params: str = "", query: str = "", fragment: str = "") -> str:
|
||||||
return urlunparse((PIPED_INSTANCE.scheme, PIPED_INSTANCE.netloc, path, params, query, fragment))
|
return urlunparse((youtube_settings["piped_instance"].scheme, youtube_settings["piped_instance"].netloc, path, params, query, fragment))
|
||||||
|
|
||||||
|
|
||||||
class YouTubeUrlType(Enum):
|
class YouTube(SuperYouTube):
|
||||||
CHANNEL = "channel"
|
|
||||||
PLAYLIST = "playlist"
|
|
||||||
VIDEO = "watch"
|
|
||||||
NONE = ""
|
|
||||||
|
|
||||||
|
|
||||||
class YouTubeUrl:
|
|
||||||
"""
|
|
||||||
Artist
|
|
||||||
https://yt.artemislena.eu/channel/UCV0Ntl3lVR7xDXKoCU6uUXA
|
|
||||||
https://www.youtube.com/channel/UCV0Ntl3lVR7xDXKoCU6uUXA
|
|
||||||
|
|
||||||
Release
|
|
||||||
https://yt.artemislena.eu/playlist?list=OLAK5uy_nEg5joAyFjHBPwnS_ADHYtgSqAjFMQKLw
|
|
||||||
https://www.youtube.com/playlist?list=OLAK5uy_nEg5joAyFjHBPwnS_ADHYtgSqAjFMQKLw
|
|
||||||
|
|
||||||
Track
|
|
||||||
https://yt.artemislena.eu/watch?v=SULFl39UjgY&list=OLAK5uy_nEg5joAyFjHBPwnS_ADHYtgSqAjFMQKLw&index=1
|
|
||||||
https://www.youtube.com/watch?v=SULFl39UjgY
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, url: str) -> None:
|
|
||||||
"""
|
|
||||||
Raises Index exception for wrong url, and value error for not found enum type
|
|
||||||
"""
|
|
||||||
self.id = ""
|
|
||||||
parsed = urlparse(url=url)
|
|
||||||
|
|
||||||
self.url_type: YouTubeUrlType
|
|
||||||
|
|
||||||
type_frag_list = parsed.path.split("/")
|
|
||||||
if len(type_frag_list) < 2:
|
|
||||||
self.url_type = YouTubeUrlType.NONE
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
self.url_type = YouTubeUrlType(type_frag_list[1].strip())
|
|
||||||
except ValueError:
|
|
||||||
self.url_type = YouTubeUrlType.NONE
|
|
||||||
|
|
||||||
if self.url_type == YouTubeUrlType.CHANNEL:
|
|
||||||
if len(type_frag_list) < 3:
|
|
||||||
self.couldnt_find_id(url)
|
|
||||||
else:
|
|
||||||
self.id = type_frag_list[2]
|
|
||||||
|
|
||||||
elif self.url_type == YouTubeUrlType.PLAYLIST:
|
|
||||||
query_stuff = parse_qs(parsed.query)
|
|
||||||
if "list" not in query_stuff:
|
|
||||||
self.couldnt_find_id(url)
|
|
||||||
else:
|
|
||||||
self.id = query_stuff["list"][0]
|
|
||||||
|
|
||||||
elif self.url_type == YouTubeUrlType.VIDEO:
|
|
||||||
query_stuff = parse_qs(parsed.query)
|
|
||||||
if "v" not in query_stuff:
|
|
||||||
self.couldnt_find_id(url)
|
|
||||||
else:
|
|
||||||
self.id = query_stuff["v"][0]
|
|
||||||
|
|
||||||
|
|
||||||
def couldnt_find_id(self, url: str):
|
|
||||||
YOUTUBE_LOGGER.warning(f"The id is missing: {url}")
|
|
||||||
self.url_type = YouTubeUrlType.NONE
|
|
||||||
|
|
||||||
@property
|
|
||||||
def api(self) -> str:
|
|
||||||
if self.url_type == YouTubeUrlType.CHANNEL:
|
|
||||||
return get_invidious_url(path=f"/api/v1/channels/playlists/{self.id}")
|
|
||||||
|
|
||||||
if self.url_type == YouTubeUrlType.PLAYLIST:
|
|
||||||
return get_invidious_url(path=f"/api/v1/playlists/{id}")
|
|
||||||
|
|
||||||
if self.url_type == YouTubeUrlType.VIDEO:
|
|
||||||
return get_invidious_url(path=f"/api/v1/videos/{self.id}")
|
|
||||||
|
|
||||||
return get_invidious_url()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def normal(self) -> str:
|
|
||||||
if self.url_type.CHANNEL:
|
|
||||||
return get_invidious_url(path=f"/channel/{self.id}")
|
|
||||||
|
|
||||||
if self.url_type.PLAYLIST:
|
|
||||||
return get_invidious_url(path="/playlist", query=f"list={self.id}")
|
|
||||||
|
|
||||||
if self.url_type.VIDEO:
|
|
||||||
return get_invidious_url(path="/watch", query=f"v={self.id}")
|
|
||||||
|
|
||||||
|
|
||||||
class YouTube(Page):
|
|
||||||
# CHANGE
|
# CHANGE
|
||||||
SOURCE_TYPE = SourcePages.YOUTUBE
|
SOURCE_TYPE = SourcePages.YOUTUBE
|
||||||
LOGGER = YOUTUBE_LOGGER
|
LOGGER = logging_settings["youtube_logger"]
|
||||||
|
|
||||||
NO_ADDITIONAL_DATA_FROM_SONG = True
|
NO_ADDITIONAL_DATA_FROM_SONG = True
|
||||||
|
|
||||||
@ -150,7 +59,7 @@ class YouTube(Page):
|
|||||||
self.download_connection: Connection = Connection(
|
self.download_connection: Connection = Connection(
|
||||||
host="https://www.youtube.com/",
|
host="https://www.youtube.com/",
|
||||||
logger=self.LOGGER,
|
logger=self.LOGGER,
|
||||||
sleep_after_404=SLEEP_AFTER_YOUTUBE_403
|
sleep_after_404=youtube_settings["sleep_after_youtube_403"]
|
||||||
)
|
)
|
||||||
|
|
||||||
# the connection is set up like this to ensure sponsorblock uses the same proxies my program does
|
# the connection is set up like this to ensure sponsorblock uses the same proxies my program does
|
||||||
@ -159,17 +68,6 @@ class YouTube(Page):
|
|||||||
|
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
def get_source_type(self, source: Source) -> Optional[Type[DatabaseObject]]:
|
|
||||||
_url_type = {
|
|
||||||
YouTubeUrlType.CHANNEL: Artist,
|
|
||||||
YouTubeUrlType.PLAYLIST: Album,
|
|
||||||
YouTubeUrlType.VIDEO: Song,
|
|
||||||
}
|
|
||||||
|
|
||||||
parsed = YouTubeUrl(source.url)
|
|
||||||
if parsed.url_type in _url_type:
|
|
||||||
return _url_type[parsed.url_type]
|
|
||||||
|
|
||||||
def general_search(self, search_query: str) -> List[DatabaseObject]:
|
def general_search(self, search_query: str) -> List[DatabaseObject]:
|
||||||
return self.artist_search(Artist(name=search_query, dynamic=True))
|
return self.artist_search(Artist(name=search_query, dynamic=True))
|
||||||
|
|
||||||
@ -418,7 +316,7 @@ class YouTube(Page):
|
|||||||
|
|
||||||
bitrate = int(possible_format.get("bitrate", 0))
|
bitrate = int(possible_format.get("bitrate", 0))
|
||||||
|
|
||||||
if bitrate >= BITRATE:
|
if bitrate >= main_settings["bitrate"]:
|
||||||
best_bitrate = bitrate
|
best_bitrate = bitrate
|
||||||
audio_format = possible_format
|
audio_format = possible_format
|
||||||
break
|
break
|
||||||
@ -436,7 +334,7 @@ class YouTube(Page):
|
|||||||
|
|
||||||
|
|
||||||
def get_skip_intervals(self, song: Song, source: Source) -> List[Tuple[float, float]]:
|
def get_skip_intervals(self, song: Song, source: Source) -> List[Tuple[float, float]]:
|
||||||
if not ENABLE_SPONSOR_BLOCK:
|
if not youtube_settings["use_sponsor_block"]:
|
||||||
return []
|
return []
|
||||||
|
|
||||||
parsed = YouTubeUrl(source.url)
|
parsed = YouTubeUrl(source.url)
|
||||||
|
1
src/music_kraken/pages/youtube_music/__init__.py
Normal file
@ -0,0 +1 @@
|
|||||||
|
from .youtube_music import YoutubeMusic
|
105
src/music_kraken/pages/youtube_music/_list_render.py
Normal file
@ -0,0 +1,105 @@
|
|||||||
|
from typing import List, Optional, Dict, Type
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
from ...utils.config import logging_settings
|
||||||
|
from ...objects import Source, DatabaseObject
|
||||||
|
from ..abstract import Page
|
||||||
|
from ...objects import (
|
||||||
|
Artist,
|
||||||
|
Source,
|
||||||
|
SourcePages,
|
||||||
|
Song,
|
||||||
|
Album,
|
||||||
|
Label,
|
||||||
|
Target
|
||||||
|
)
|
||||||
|
from ._music_object_render import parse_run_list, parse_run_element
|
||||||
|
|
||||||
|
|
||||||
|
LOGGER = logging_settings["youtube_music_logger"]
|
||||||
|
|
||||||
|
|
||||||
|
def music_card_shelf_renderer(renderer: dict) -> List[DatabaseObject]:
|
||||||
|
results = parse_run_list(renderer.get("title", {}).get("runs", []))
|
||||||
|
|
||||||
|
for sub_renderer in renderer.get("contents", []):
|
||||||
|
results.extend(parse_renderer(sub_renderer))
|
||||||
|
return results
|
||||||
|
|
||||||
|
def music_responsive_list_item_flex_column_renderer(renderer: dict) -> List[DatabaseObject]:
|
||||||
|
return parse_run_list(renderer.get("text", {}).get("runs", []))
|
||||||
|
|
||||||
|
def music_responsive_list_item_renderer(renderer: dict) -> List[DatabaseObject]:
|
||||||
|
results = []
|
||||||
|
|
||||||
|
for i, collumn in enumerate(renderer.get("flexColumns", [])):
|
||||||
|
_r = parse_renderer(collumn)
|
||||||
|
if i == 0 and len(_r) == 0:
|
||||||
|
renderer["text"] = collumn.get("musicResponsiveListItemFlexColumnRenderer", {}).get("text", {}).get("runs", [{}])[0].get("text")
|
||||||
|
|
||||||
|
results.extend(_r)
|
||||||
|
|
||||||
|
_r = parse_run_element(renderer)
|
||||||
|
if _r is not None:
|
||||||
|
results.append(_r)
|
||||||
|
|
||||||
|
song_list: List[Song] = []
|
||||||
|
album_list: List[Album] = []
|
||||||
|
artist_list: List[Artist] = []
|
||||||
|
_map: Dict[Type[DatabaseObject], List[DatabaseObject]] = {Song: song_list, Album: album_list, Artist: artist_list}
|
||||||
|
|
||||||
|
for result in results:
|
||||||
|
_map[type(result)].append(result)
|
||||||
|
|
||||||
|
for song in song_list:
|
||||||
|
song.album_collection.extend(album_list)
|
||||||
|
song.main_artist_collection.extend(artist_list)
|
||||||
|
|
||||||
|
for album in album_list:
|
||||||
|
album.artist_collection.extend(artist_list)
|
||||||
|
|
||||||
|
if len(song_list) > 0:
|
||||||
|
return song_list
|
||||||
|
if len(album_list) > 0:
|
||||||
|
return album_list
|
||||||
|
if len(artist_list) > 0:
|
||||||
|
return artist_list
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
def music_shelf_renderer(renderer: dict) -> List[DatabaseObject]:
|
||||||
|
result = []
|
||||||
|
for subrenderer in renderer.get("contents"):
|
||||||
|
result.extend(parse_renderer(subrenderer))
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def music_carousel_shelf_renderer(renderer: dict):
|
||||||
|
return music_shelf_renderer(renderer=renderer)
|
||||||
|
|
||||||
|
def music_two_row_item_renderer(renderer: dict):
|
||||||
|
return parse_run_list(renderer.get("title", {}).get("runs", []))
|
||||||
|
|
||||||
|
|
||||||
|
RENDERER_PARSERS = {
|
||||||
|
"musicCardShelfRenderer": music_card_shelf_renderer,
|
||||||
|
"musicResponsiveListItemRenderer": music_responsive_list_item_renderer,
|
||||||
|
"musicResponsiveListItemFlexColumnRenderer": music_responsive_list_item_flex_column_renderer,
|
||||||
|
"musicShelfRenderer": music_card_shelf_renderer,
|
||||||
|
"musicCarouselShelfRenderer": music_carousel_shelf_renderer,
|
||||||
|
"musicTwoRowItemRenderer": music_two_row_item_renderer,
|
||||||
|
|
||||||
|
"itemSectionRenderer": lambda _: [],
|
||||||
|
}
|
||||||
|
|
||||||
|
def parse_renderer(renderer: dict) -> List[DatabaseObject]:
|
||||||
|
result: List[DatabaseObject] = []
|
||||||
|
|
||||||
|
for renderer_name, renderer in renderer.items():
|
||||||
|
if renderer_name not in RENDERER_PARSERS:
|
||||||
|
LOGGER.warning(f"Can't parse the renderer {renderer_name}.")
|
||||||
|
continue
|
||||||
|
|
||||||
|
result.extend(RENDERER_PARSERS[renderer_name](renderer))
|
||||||
|
|
||||||
|
return result
|
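For orientation, a minimal sketch of how the renderer dispatch above could be driven. It assumes the module path `music_kraken.pages.youtube_music._list_render` from this commit; the input dict is hand-made and only mimics the nesting the parsers expect, it is not a real YouTube Music response.

```python
# Hand-rolled renderer dict in the shape the parsers above expect (not a real response).
from music_kraken.pages.youtube_music._list_render import parse_renderer

fake_renderer = {
    "musicShelfRenderer": {
        "contents": [
            # a flex column whose runs carry no navigationEndpoint yields no objects
            {"musicResponsiveListItemFlexColumnRenderer": {"text": {"runs": []}}}
        ]
    }
}

# parse_renderer looks every key up in RENDERER_PARSERS and concatenates the results;
# unknown keys are only logged as a warning and skipped.
print(parse_renderer(fake_renderer))  # -> [] for this empty stand-in
```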
80
src/music_kraken/pages/youtube_music/_music_object_render.py
Normal file
@ -0,0 +1,80 @@
|
|||||||
|
from typing import List, Optional
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
from ...utils.config import youtube_settings, logging_settings
|
||||||
|
from ...objects import Source, DatabaseObject
|
||||||
|
from ..abstract import Page
|
||||||
|
from ...objects import (
|
||||||
|
Artist,
|
||||||
|
Source,
|
||||||
|
SourcePages,
|
||||||
|
Song,
|
||||||
|
Album,
|
||||||
|
Label,
|
||||||
|
Target
|
||||||
|
)
|
||||||
|
|
||||||
|
LOGGER = logging_settings["youtube_music_logger"]
|
||||||
|
|
||||||
|
|
||||||
|
SOURCE_PAGE = SourcePages.YOUTUBE_MUSIC
|
||||||
|
|
||||||
|
|
||||||
|
class PageType(Enum):
|
||||||
|
ARTIST = "MUSIC_PAGE_TYPE_ARTIST"
|
||||||
|
ALBUM = "MUSIC_PAGE_TYPE_ALBUM"
|
||||||
|
CHANNEL = "MUSIC_PAGE_TYPE_USER_CHANNEL"
|
||||||
|
PLAYLIST = "MUSIC_PAGE_TYPE_PLAYLIST"
|
||||||
|
SONG = "MUSIC_VIDEO_TYPE_ATV"
|
||||||
|
VIDEO = "MUSIC_VIDEO_TYPE_UGC"
|
||||||
|
OFFICIAL_MUSIC_VIDEO = "MUSIC_VIDEO_TYPE_OMV"
|
||||||
|
|
||||||
|
|
||||||
|
def parse_run_element(run_element: dict) -> Optional[DatabaseObject]:
|
||||||
|
if "navigationEndpoint" not in run_element:
|
||||||
|
return
|
||||||
|
|
||||||
|
_temp_nav = run_element.get("navigationEndpoint", {})
|
||||||
|
is_video = "watchEndpoint" in _temp_nav
|
||||||
|
|
||||||
|
navigation_endpoint = _temp_nav.get("watchEndpoint" if is_video else "browseEndpoint", {})
|
||||||
|
|
||||||
|
element_type = PageType.SONG
|
||||||
|
page_type_string = navigation_endpoint.get("watchEndpointMusicSupportedConfigs", {}).get("watchEndpointMusicConfig", {}).get("musicVideoType", "")
|
||||||
|
if not is_video:
|
||||||
|
page_type_string = navigation_endpoint.get("browseEndpointContextSupportedConfigs", {}).get("browseEndpointContextMusicConfig", {}).get("pageType", "")
|
||||||
|
element_type = PageType(page_type_string)
|
||||||
|
|
||||||
|
element_id = navigation_endpoint.get("videoId" if is_video else "browseId")
|
||||||
|
element_text = run_element.get("text")
|
||||||
|
|
||||||
|
if element_id is None or element_text is None:
|
||||||
|
LOGGER.warning("Couldn't find either the id or text of a Youtube music element.")
|
||||||
|
return
|
||||||
|
|
||||||
|
if element_type == PageType.SONG or (element_type == PageType.VIDEO and not youtube_settings["youtube_music_clean_data"]) or (element_type == PageType.OFFICIAL_MUSIC_VIDEO and not youtube_settings["youtube_music_clean_data"]):
|
||||||
|
source = Source(SOURCE_PAGE, f"https://music.youtube.com/watch?v={element_id}")
|
||||||
|
return Song(title=element_text, source_list=[source])
|
||||||
|
|
||||||
|
if element_type == PageType.ARTIST or (element_type == PageType.CHANNEL and not youtube_settings["youtube_music_clean_data"]):
|
||||||
|
source = Source(SOURCE_PAGE, f"https://music.youtube.com/channel/{element_id}")
|
||||||
|
return Artist(name=element_text, source_list=[source])
|
||||||
|
|
||||||
|
if element_type == PageType.ALBUM or (element_type == PageType.PLAYLIST and not youtube_settings["youtube_music_clean_data"]):
|
||||||
|
source = Source(SOURCE_PAGE, f"https://music.youtube.com/playlist?list={element_id}")
|
||||||
|
return Album(title=element_text, source_list=[source])
|
||||||
|
|
||||||
|
LOGGER.debug(f"Type {page_type_string} wasn't implemented.")
|
||||||
|
|
||||||
|
|
||||||
|
def parse_run_list(run_list: List[dict]) -> List[DatabaseObject]:
|
||||||
|
music_object_list: List[DatabaseObject] = []
|
||||||
|
|
||||||
|
for run_renderer in run_list:
|
||||||
|
music_object = parse_run_element(run_renderer)
|
||||||
|
if music_object is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
music_object_list.append(music_object)
|
||||||
|
|
||||||
|
return music_object_list
|
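A hedged sketch of the element shape `parse_run_element` above consumes; the video id and title are made up, and the import assumes the package layout from this commit.

```python
# Fabricated run element; only the structure matters, the id and title are placeholders.
from music_kraken.pages.youtube_music._music_object_render import parse_run_element

run_element = {
    "text": "Some Song Title",
    "navigationEndpoint": {
        "watchEndpoint": {
            "videoId": "abc123xyz00",
            "watchEndpointMusicSupportedConfigs": {
                "watchEndpointMusicConfig": {"musicVideoType": "MUSIC_VIDEO_TYPE_ATV"}
            },
        }
    },
}

# MUSIC_VIDEO_TYPE_ATV maps to PageType.SONG, so this yields a Song whose source points
# at https://music.youtube.com/watch?v=abc123xyz00
song = parse_run_element(run_element)
print(song)
```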
217
src/music_kraken/pages/youtube_music/super_youtube.py
Normal file
@ -0,0 +1,217 @@
|
|||||||
|
from typing import List, Optional, Type, Tuple
|
||||||
|
from urllib.parse import urlparse, urlunparse, parse_qs
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
import sponsorblock
|
||||||
|
from sponsorblock.errors import HTTPException, NotFoundException
|
||||||
|
|
||||||
|
from ...objects import Source, DatabaseObject, Song, Target
|
||||||
|
from ..abstract import Page
|
||||||
|
from ...objects import (
|
||||||
|
Artist,
|
||||||
|
Source,
|
||||||
|
SourcePages,
|
||||||
|
Song,
|
||||||
|
Album,
|
||||||
|
Label,
|
||||||
|
Target,
|
||||||
|
FormattedText,
|
||||||
|
ID3Timestamp
|
||||||
|
)
|
||||||
|
from ...connection import Connection
|
||||||
|
from ...utils.support_classes import DownloadResult
|
||||||
|
from ...utils.config import youtube_settings, logging_settings, main_settings
|
||||||
|
|
||||||
|
|
||||||
|
def get_invidious_url(path: str = "", params: str = "", query: str = "", fragment: str = "") -> str:
|
||||||
|
return urlunparse((youtube_settings["invidious_instance"].scheme, youtube_settings["invidious_instance"].netloc, path, params, query, fragment))
|
||||||
|
|
||||||
|
|
||||||
|
class YouTubeUrlType(Enum):
|
||||||
|
CHANNEL = "channel"
|
||||||
|
PLAYLIST = "playlist"
|
||||||
|
VIDEO = "watch"
|
||||||
|
NONE = ""
|
||||||
|
|
||||||
|
|
||||||
|
class YouTubeUrl:
|
||||||
|
"""
|
||||||
|
Artist
|
||||||
|
https://yt.artemislena.eu/channel/UCV0Ntl3lVR7xDXKoCU6uUXA
|
||||||
|
https://www.youtube.com/channel/UCV0Ntl3lVR7xDXKoCU6uUXA
|
||||||
|
|
||||||
|
Release
|
||||||
|
https://yt.artemislena.eu/playlist?list=OLAK5uy_nEg5joAyFjHBPwnS_ADHYtgSqAjFMQKLw
|
||||||
|
https://www.youtube.com/playlist?list=OLAK5uy_nEg5joAyFjHBPwnS_ADHYtgSqAjFMQKLw
|
||||||
|
|
||||||
|
Track
|
||||||
|
https://yt.artemislena.eu/watch?v=SULFl39UjgY&list=OLAK5uy_nEg5joAyFjHBPwnS_ADHYtgSqAjFMQKLw&index=1
|
||||||
|
https://www.youtube.com/watch?v=SULFl39UjgY
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, url: str) -> None:
|
||||||
|
self.SOURCE_TYPE = SourcePages.YOUTUBE
|
||||||
|
|
||||||
|
"""
|
||||||
|
Raises an IndexError for a malformed url, and a ValueError if the enum type is not found
|
||||||
|
"""
|
||||||
|
self.id = ""
|
||||||
|
parsed = urlparse(url=url)
|
||||||
|
|
||||||
|
if parsed.netloc == "music.youtube.com":
|
||||||
|
self.SOURCE_TYPE = SourcePages.YOUTUBE_MUSIC
|
||||||
|
|
||||||
|
self.url_type: YouTubeUrlType
|
||||||
|
|
||||||
|
type_frag_list = parsed.path.split("/")
|
||||||
|
if len(type_frag_list) < 2:
|
||||||
|
self.url_type = YouTubeUrlType.NONE
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
self.url_type = YouTubeUrlType(type_frag_list[1].strip())
|
||||||
|
except ValueError:
|
||||||
|
self.url_type = YouTubeUrlType.NONE
|
||||||
|
|
||||||
|
if self.url_type == YouTubeUrlType.CHANNEL:
|
||||||
|
if len(type_frag_list) < 3:
|
||||||
|
self.couldnt_find_id(url)
|
||||||
|
else:
|
||||||
|
self.id = type_frag_list[2]
|
||||||
|
|
||||||
|
elif self.url_type == YouTubeUrlType.PLAYLIST:
|
||||||
|
query_stuff = parse_qs(parsed.query)
|
||||||
|
if "list" not in query_stuff:
|
||||||
|
self.couldnt_find_id(url)
|
||||||
|
else:
|
||||||
|
self.id = query_stuff["list"][0]
|
||||||
|
|
||||||
|
elif self.url_type == YouTubeUrlType.VIDEO:
|
||||||
|
query_stuff = parse_qs(parsed.query)
|
||||||
|
if "v" not in query_stuff:
|
||||||
|
self.couldnt_find_id(url)
|
||||||
|
else:
|
||||||
|
self.id = query_stuff["v"][0]
|
||||||
|
|
||||||
|
|
||||||
|
def couldnt_find_id(self, url: str):
|
||||||
|
logging_settings["youtube_logger"].warning(f"The id is missing: {url}")
|
||||||
|
self.url_type = YouTubeUrlType.NONE
|
||||||
|
|
||||||
|
@property
|
||||||
|
def api(self) -> str:
|
||||||
|
if self.url_type == YouTubeUrlType.CHANNEL:
|
||||||
|
return get_invidious_url(path=f"/api/v1/channels/playlists/{self.id}")
|
||||||
|
|
||||||
|
if self.url_type == YouTubeUrlType.PLAYLIST:
|
||||||
|
return get_invidious_url(path=f"/api/v1/playlists/{self.id}")
|
||||||
|
|
||||||
|
if self.url_type == YouTubeUrlType.VIDEO:
|
||||||
|
return get_invidious_url(path=f"/api/v1/videos/{self.id}")
|
||||||
|
|
||||||
|
return get_invidious_url()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def normal(self) -> str:
|
||||||
|
if self.url_type == YouTubeUrlType.CHANNEL:
|
||||||
|
return get_invidious_url(path=f"/channel/{self.id}")
|
||||||
|
|
||||||
|
if self.url_type == YouTubeUrlType.PLAYLIST:
|
||||||
|
return get_invidious_url(path="/playlist", query=f"list={self.id}")
|
||||||
|
|
||||||
|
if self.url_type == YouTubeUrlType.VIDEO:
|
||||||
|
return get_invidious_url(path="/watch", query=f"v={self.id}")
|
||||||
|
|
||||||
|
|
||||||
|
class SuperYouTube(Page):
|
||||||
|
# CHANGE
|
||||||
|
SOURCE_TYPE = SourcePages.YOUTUBE
|
||||||
|
LOGGER = logging_settings["youtube_logger"]
|
||||||
|
|
||||||
|
NO_ADDITIONAL_DATA_FROM_SONG = True
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
self.download_connection: Connection = Connection(
|
||||||
|
host="https://www.youtube.com/",
|
||||||
|
logger=self.LOGGER,
|
||||||
|
sleep_after_404=youtube_settings["sleep_after_youtube_403"]
|
||||||
|
)
|
||||||
|
|
||||||
|
# the connection is set up like this to ensure sponsorblock uses the same proxies my program does
|
||||||
|
_sponsorblock_connection: Connection = Connection(host="https://sponsor.ajay.app/")
|
||||||
|
self.sponsorblock_client = sponsorblock.Client(session=_sponsorblock_connection.session)
|
||||||
|
|
||||||
|
|
||||||
|
def get_source_type(self, source: Source) -> Optional[Type[DatabaseObject]]:
|
||||||
|
_url_type = {
|
||||||
|
YouTubeUrlType.CHANNEL: Artist,
|
||||||
|
YouTubeUrlType.PLAYLIST: Album,
|
||||||
|
YouTubeUrlType.VIDEO: Song,
|
||||||
|
}
|
||||||
|
|
||||||
|
parsed = YouTubeUrl(source.url)
|
||||||
|
if parsed.url_type in _url_type:
|
||||||
|
return _url_type[parsed.url_type]
|
||||||
|
|
||||||
|
|
||||||
|
def download_song_to_target(self, source: Source, target: Target, desc: str = None) -> DownloadResult:
|
||||||
|
"""
|
||||||
|
1. getting the optimal source
|
||||||
|
Only audio sources allowed
|
||||||
|
not a bitrate that is smaller than the selected bitrate, but also not one that is vastly higher
|
||||||
|
|
||||||
|
2. download it
|
||||||
|
|
||||||
|
:param source:
|
||||||
|
:param target:
|
||||||
|
:param desc:
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
r = self.connection.get(YouTubeUrl(source.url).api)
|
||||||
|
if r is None:
|
||||||
|
return DownloadResult(error_message="Api didn't even respond, maybe try another invidious Instance")
|
||||||
|
|
||||||
|
audio_format = None
|
||||||
|
best_bitrate = 0
|
||||||
|
|
||||||
|
for possible_format in r.json()["adaptiveFormats"]:
|
||||||
|
format_type: str = possible_format["type"]
|
||||||
|
if not format_type.startswith("audio"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
bitrate = int(possible_format.get("bitrate", 0))
|
||||||
|
|
||||||
|
if bitrate >= main_settings["bitrate"]:
|
||||||
|
best_bitrate = bitrate
|
||||||
|
audio_format = possible_format
|
||||||
|
break
|
||||||
|
|
||||||
|
if bitrate > best_bitrate:
|
||||||
|
best_bitrate = bitrate
|
||||||
|
audio_format = possible_format
|
||||||
|
|
||||||
|
if audio_format is None:
|
||||||
|
return DownloadResult(error_message="Couldn't find the download link.")
|
||||||
|
|
||||||
|
endpoint = audio_format["url"]
|
||||||
|
|
||||||
|
return self.download_connection.stream_into(endpoint, target, description=desc, raw_url=True)
|
||||||
|
|
||||||
|
|
||||||
|
def get_skip_intervals(self, song: Song, source: Source) -> List[Tuple[float, float]]:
|
||||||
|
if not youtube_settings["use_sponsor_block"]:
|
||||||
|
return []
|
||||||
|
|
||||||
|
parsed = YouTubeUrl(source.url)
|
||||||
|
if parsed.url_type != YouTubeUrlType.VIDEO:
|
||||||
|
self.LOGGER.warning(f"{source.url} is not a video url.")
|
||||||
|
return []
|
||||||
|
|
||||||
|
segments = []
|
||||||
|
try:
|
||||||
|
segments = self.sponsorblock_client.get_skip_segments(parsed.id)
|
||||||
|
except NotFoundException:
|
||||||
|
self.LOGGER.debug(f"No sponsor found for the video {parsed.id}.")
|
||||||
|
except HTTPException as e:
|
||||||
|
self.LOGGER.warning(f"{e}")
|
||||||
|
|
||||||
|
return [(segment.start, segment.end) for segment in segments]
|
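A short usage sketch for the `YouTubeUrl` helper above, using the example URLs quoted in its own docstring; the generated API URLs depend on the configured `youtube_settings["invidious_instance"]`.

```python
# Usage sketch for YouTubeUrl; the URLs are the ones listed in the class docstring above.
from music_kraken.pages.youtube_music.super_youtube import YouTubeUrl, YouTubeUrlType

track = YouTubeUrl("https://www.youtube.com/watch?v=SULFl39UjgY")
assert track.url_type is YouTubeUrlType.VIDEO
assert track.id == "SULFl39UjgY"
print(track.api)      # invidious endpoint /api/v1/videos/<id> on the configured instance

artist = YouTubeUrl("https://www.youtube.com/channel/UCV0Ntl3lVR7xDXKoCU6uUXA")
assert artist.url_type is YouTubeUrlType.CHANNEL
print(artist.normal)  # /channel/<id> on the configured invidious instance
```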
353
src/music_kraken/pages/youtube_music/youtube_music.py
Normal file
@ -0,0 +1,353 @@
|
|||||||
|
from typing import Dict, List, Optional, Set, Type
|
||||||
|
from urllib.parse import urlparse, urlunparse, quote, parse_qs
|
||||||
|
import logging
|
||||||
|
import random
|
||||||
|
import json
|
||||||
|
from dataclasses import dataclass
|
||||||
|
import re
|
||||||
|
|
||||||
|
from ...utils.exception.config import SettingValueError
|
||||||
|
from ...utils.config import main_settings, youtube_settings, logging_settings
|
||||||
|
from ...utils.shared import DEBUG, DEBUG_YOUTUBE_INITIALIZING
|
||||||
|
from ...utils.functions import get_current_millis
|
||||||
|
if DEBUG:
|
||||||
|
from ...utils.debug_utils import dump_to_file
|
||||||
|
|
||||||
|
from ...objects import Source, DatabaseObject
|
||||||
|
from ..abstract import Page
|
||||||
|
from ...objects import (
|
||||||
|
Artist,
|
||||||
|
Source,
|
||||||
|
SourcePages,
|
||||||
|
Song,
|
||||||
|
Album,
|
||||||
|
Label,
|
||||||
|
Target
|
||||||
|
)
|
||||||
|
from ...connection import Connection
|
||||||
|
from ...utils.support_classes import DownloadResult
|
||||||
|
|
||||||
|
from ._list_render import parse_renderer
|
||||||
|
from .super_youtube import SuperYouTube
|
||||||
|
|
||||||
|
|
||||||
|
def get_youtube_url(path: str = "", params: str = "", query: str = "", fragment: str = "") -> str:
|
||||||
|
return urlunparse(("https", "music.youtube.com", path, params, query, fragment))
|
||||||
|
|
||||||
|
|
||||||
|
class YoutubeMusicConnection(Connection):
|
||||||
|
"""
|
||||||
|
===heartbeat=timings=for=YOUTUBEMUSIC===
|
||||||
|
96.27
|
||||||
|
98.16
|
||||||
|
100.04
|
||||||
|
101.93
|
||||||
|
103.82
|
||||||
|
|
||||||
|
--> average delay in between: 1.8875 min
|
||||||
|
"""
|
||||||
|
def __init__(self, logger: logging.Logger, accept_language: str):
|
||||||
|
# https://stackoverflow.com/questions/30561260/python-change-accept-language-using-requests
|
||||||
|
super().__init__(
|
||||||
|
host="https://music.youtube.com/",
|
||||||
|
logger=logger,
|
||||||
|
heartbeat_interval=113.25,
|
||||||
|
header_values={
|
||||||
|
"Accept-Language": accept_language
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# cookie consent for youtube
|
||||||
|
# https://stackoverflow.com/a/66940841/16804841 doesn't work
|
||||||
|
for cookie_key, cookie_value in youtube_settings["youtube_music_consent_cookies"].items():
|
||||||
|
self.session.cookies.set(
|
||||||
|
name=cookie_key,
|
||||||
|
value=cookie_value,
|
||||||
|
path='/', domain='.youtube.com'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def heartbeat(self):
|
||||||
|
r = self.get("https://music.youtube.com/verify_session", is_heartbeat=True)
|
||||||
|
if r is None:
|
||||||
|
self.heartbeat_failed()
|
||||||
|
|
||||||
|
string = r.content.decode("utf-8")
|
||||||
|
|
||||||
|
data = json.loads(string[string.index("{"):])
|
||||||
|
success: bool = data["success"]
|
||||||
|
|
||||||
|
if not success:
|
||||||
|
self.heartbeat_failed()
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class YouTubeMusicCredentials:
|
||||||
|
api_key: str
|
||||||
|
|
||||||
|
# ctoken is probably short for continue-token
|
||||||
|
# It is probably not strictly necessary, but hey :))
|
||||||
|
ctoken: str
|
||||||
|
|
||||||
|
# the context in requests
|
||||||
|
context: dict
|
||||||
|
|
||||||
|
|
||||||
|
class YoutubeMusic(SuperYouTube):
|
||||||
|
# CHANGE
|
||||||
|
SOURCE_TYPE = SourcePages.YOUTUBE_MUSIC
|
||||||
|
LOGGER = logging_settings["youtube_music_logger"]
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
self.connection: YoutubeMusicConnection = YoutubeMusicConnection(logger=self.LOGGER, accept_language="en-US,en;q=0.5")
|
||||||
|
self.credentials: YouTubeMusicCredentials = YouTubeMusicCredentials(
|
||||||
|
api_key=youtube_settings["youtube_music_api_key"],
|
||||||
|
ctoken="",
|
||||||
|
context=youtube_settings["youtube_music_innertube_context"]
|
||||||
|
)
|
||||||
|
|
||||||
|
self.start_millis = get_current_millis()
|
||||||
|
|
||||||
|
if self.credentials.api_key == "" or DEBUG_YOUTUBE_INITIALIZING:
|
||||||
|
self._fetch_from_main_page()
|
||||||
|
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def _fetch_from_main_page(self):
|
||||||
|
"""
|
||||||
|
===API=KEY===
|
||||||
|
AIzaSyC9XL3ZjWddXya6X74dJoCTL-WEYFDNX30
|
||||||
|
can be found at `view-source:https://music.youtube.com/`
|
||||||
|
search for: "innertubeApiKey"
|
||||||
|
"""
|
||||||
|
|
||||||
|
r = self.connection.get("https://music.youtube.com/")
|
||||||
|
if r is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
if urlparse(r.url).netloc == "consent.youtube.com":
|
||||||
|
self.LOGGER.info(f"Making cookie consent request for {type(self).__name__}.")
|
||||||
|
r = self.connection.post("https://consent.youtube.com/save", data={
|
||||||
|
'gl': 'DE',
|
||||||
|
'm': '0',
|
||||||
|
'app': '0',
|
||||||
|
'pc': 'ytm',
|
||||||
|
'continue': 'https://music.youtube.com/?cbrd=1',
|
||||||
|
'x': '6',
|
||||||
|
'bl': 'boq_identityfrontenduiserver_20230905.04_p0',
|
||||||
|
'hl': 'en',
|
||||||
|
'src': '1',
|
||||||
|
'cm': '2',
|
||||||
|
'set_ytc': 'true',
|
||||||
|
'set_apyt': 'true',
|
||||||
|
'set_eom': 'false'
|
||||||
|
})
|
||||||
|
if r is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
# load cookie dict from settings
|
||||||
|
cookie_dict = youtube_settings["youtube_music_consent_cookies"]
|
||||||
|
|
||||||
|
for cookie in r.cookies:
|
||||||
|
cookie_dict[cookie.name] = cookie.value
|
||||||
|
for cookie in self.connection.session.cookies:
|
||||||
|
cookie_dict[cookie.name] = cookie.value
|
||||||
|
|
||||||
|
# save cookies in settings
|
||||||
|
youtube_settings["youtube_music_consent_cookies"] = cookie_dict
|
||||||
|
|
||||||
|
r = self.connection.get("https://music.youtube.com/")
|
||||||
|
if r is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
content = r.text
|
||||||
|
|
||||||
|
if DEBUG:
|
||||||
|
dump_to_file(f"youtube_music_index.html", r.text, exit_after_dump=False)
|
||||||
|
|
||||||
|
# api key
|
||||||
|
api_key_pattern = (
|
||||||
|
r"(?<=\"innertubeApiKey\":\")(.*?)(?=\")",
|
||||||
|
r"(?<=\"INNERTUBE_API_KEY\":\")(.*?)(?=\")",
|
||||||
|
)
|
||||||
|
|
||||||
|
api_keys = []
|
||||||
|
for api_key_patter in api_key_pattern:
|
||||||
|
api_keys.extend(re.findall(api_key_patter, content))
|
||||||
|
|
||||||
|
found_a_good_api_key = False
|
||||||
|
for api_key in api_keys:
|
||||||
|
# save the first api key
|
||||||
|
api_key = api_keys[0]
|
||||||
|
|
||||||
|
try:
|
||||||
|
youtube_settings["youtube_music_api_key"] = api_key
|
||||||
|
except SettingValueError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
found_a_good_api_key = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if found_a_good_api_key:
|
||||||
|
self.LOGGER.info(f"Found a valid API-KEY for {type(self).__name__}: \"{api_key}\"")
|
||||||
|
else:
|
||||||
|
self.LOGGER.error(f"Couldn't find an API-KEY for {type(self).__name__}. :((")
|
||||||
|
|
||||||
|
# context
|
||||||
|
context_pattern = r"(?<=\"INNERTUBE_CONTEXT\":{)(.*?)(?=},\"INNERTUBE_CONTEXT_CLIENT_NAME\":)"
|
||||||
|
found_context = False
|
||||||
|
for context_string in re.findall(context_pattern, content, re.M):
|
||||||
|
try:
|
||||||
|
youtube_settings["youtube_music_innertube_context"] = json.loads("{" + context_string + "}")
|
||||||
|
found_context = True
|
||||||
|
except json.decoder.JSONDecodeError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
self.credentials.context = youtube_settings["youtube_music_innertube_context"]
|
||||||
|
break
|
||||||
|
|
||||||
|
if not found_context:
|
||||||
|
self.LOGGER.warning(f"Couldn't find a context for {type(self).__name__}.")
|
||||||
|
|
||||||
|
def get_source_type(self, source: Source) -> Optional[Type[DatabaseObject]]:
|
||||||
|
return super().get_source_type(source)
|
||||||
|
|
||||||
|
def general_search(self, search_query: str) -> List[DatabaseObject]:
|
||||||
|
search_query = search_query.strip()
|
||||||
|
|
||||||
|
urlescaped_query: str = quote(search_query.strip().replace(" ", "+"))
|
||||||
|
|
||||||
|
# approximate the amount of time it would take to type the search, because google for some reason tracks that
|
||||||
|
LAST_EDITED_TIME = get_current_millis() - random.randint(0, 20)
|
||||||
|
_estimated_time = sum(len(search_query) * random.randint(50, 100) for _ in search_query.strip())
|
||||||
|
FIRST_EDITED_TIME = LAST_EDITED_TIME - _estimated_time if LAST_EDITED_TIME - self.start_millis > _estimated_time else random.randint(50, 100)
|
||||||
|
|
||||||
|
query_continue = "" if self.credentials.ctoken == "" else f"&ctoken={self.credentials.ctoken}&continuation={self.credentials.ctoken}"
|
||||||
|
|
||||||
|
# construct the request
|
||||||
|
r = self.connection.post(
|
||||||
|
url=get_youtube_url(path="/youtubei/v1/search", query=f"key={self.credentials.api_key}&prettyPrint=false"+query_continue),
|
||||||
|
json={
|
||||||
|
"context": {**self.credentials.context, "adSignalsInfo":{"params":[]}},
|
||||||
|
"query": search_query,
|
||||||
|
"suggestStats": {
|
||||||
|
"clientName": "youtube-music",
|
||||||
|
"firstEditTimeMsec": FIRST_EDITED_TIME,
|
||||||
|
"inputMethod": "KEYBOARD",
|
||||||
|
"lastEditTimeMsec": LAST_EDITED_TIME,
|
||||||
|
"originalQuery": search_query,
|
||||||
|
"parameterValidationStatus": "VALID_PARAMETERS",
|
||||||
|
"searchMethod": "ENTER_KEY",
|
||||||
|
"validationStatus": "VALID",
|
||||||
|
"zeroPrefixEnabled": True,
|
||||||
|
"availableSuggestions": []
|
||||||
|
}
|
||||||
|
},
|
||||||
|
headers={
|
||||||
|
"Referer": get_youtube_url(path=f"/search", query=f"q={urlescaped_query}")
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
renderer_list = r.json().get("contents", {}).get("tabbedSearchResultsRenderer", {}).get("tabs", [{}])[0].get("tabRenderer").get("content", {}).get("sectionListRenderer", {}).get("contents", [])
|
||||||
|
|
||||||
|
if DEBUG:
|
||||||
|
for i, content in enumerate(renderer_list):
|
||||||
|
dump_to_file(f"{i}-renderer.json", json.dumps(content), is_json=True, exit_after_dump=False)
|
||||||
|
|
||||||
|
results = []
|
||||||
|
|
||||||
|
"""
|
||||||
|
can't use fixed indices, because if something has no entries the list disappears
|
||||||
|
instead I have to try to parse everything, and just reject community playlists and profiles.
|
||||||
|
"""
|
||||||
|
|
||||||
|
for renderer in renderer_list:
|
||||||
|
results.extend(parse_renderer(renderer))
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
def fetch_artist(self, source: Source, stop_at_level: int = 1) -> Artist:
|
||||||
|
artist = Artist()
|
||||||
|
|
||||||
|
# construct the request
|
||||||
|
url = urlparse(source.url)
|
||||||
|
browse_id = url.path.replace("/channel/", "")
|
||||||
|
|
||||||
|
r = self.connection.post(
|
||||||
|
url=get_youtube_url(path="/youtubei/v1/browse", query=f"key={self.credentials.api_key}&prettyPrint=false"),
|
||||||
|
json={
|
||||||
|
"browseId": browse_id,
|
||||||
|
"context": {**self.credentials.context, "adSignalsInfo":{"params":[]}}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
if r is None:
|
||||||
|
return artist
|
||||||
|
|
||||||
|
if DEBUG:
|
||||||
|
dump_to_file(f"{browse_id}.json", r.text, is_json=True, exit_after_dump=False)
|
||||||
|
|
||||||
|
renderer_list = r.json().get("contents", {}).get("singleColumnBrowseResultsRenderer", {}).get("tabs", [{}])[0].get("tabRenderer", {}).get("content", {}).get("sectionListRenderer", {}).get("contents", [])
|
||||||
|
|
||||||
|
if DEBUG:
|
||||||
|
for i, content in enumerate(renderer_list):
|
||||||
|
dump_to_file(f"{i}-artists-renderer.json", json.dumps(content), is_json=True, exit_after_dump=False)
|
||||||
|
|
||||||
|
results = []
|
||||||
|
|
||||||
|
"""
|
||||||
|
can't use fixed indices, because if something has no entries the list disappears
|
||||||
|
instead I have to try to parse everything, and just reject community playlists and profiles.
|
||||||
|
"""
|
||||||
|
|
||||||
|
for renderer in renderer_list:
|
||||||
|
results.extend(parse_renderer(renderer))
|
||||||
|
|
||||||
|
artist.add_list_of_other_objects(results)
|
||||||
|
|
||||||
|
return artist
|
||||||
|
|
||||||
|
def fetch_album(self, source: Source, stop_at_level: int = 1) -> Album:
|
||||||
|
album = Album()
|
||||||
|
|
||||||
|
parsed_url = urlparse(source.url)
|
||||||
|
list_id_list = parse_qs(parsed_url.query)['list']
|
||||||
|
if len(list_id_list) <= 0:
|
||||||
|
return album
|
||||||
|
browse_id = list_id_list[0]
|
||||||
|
|
||||||
|
r = self.connection.post(
|
||||||
|
url=get_youtube_url(path="/youtubei/v1/browse", query=f"key={self.credentials.api_key}&prettyPrint=false"),
|
||||||
|
json={
|
||||||
|
"browseId": browse_id,
|
||||||
|
"context": {**self.credentials.context, "adSignalsInfo":{"params":[]}}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
if r is None:
|
||||||
|
return album
|
||||||
|
|
||||||
|
if DEBUG:
|
||||||
|
dump_to_file(f"{browse_id}.json", r.text, is_json=True, exit_after_dump=False)
|
||||||
|
|
||||||
|
renderer_list = r.json().get("contents", {}).get("singleColumnBrowseResultsRenderer", {}).get("tabs", [{}])[0].get("tabRenderer", {}).get("content", {}).get("sectionListRenderer", {}).get("contents", [])
|
||||||
|
|
||||||
|
if DEBUG:
|
||||||
|
for i, content in enumerate(renderer_list):
|
||||||
|
dump_to_file(f"{i}-album-renderer.json", json.dumps(content), is_json=True, exit_after_dump=False)
|
||||||
|
|
||||||
|
results = []
|
||||||
|
|
||||||
|
"""
|
||||||
|
can't use fixed indices, because if something has no entries the list disappears
|
||||||
|
instead I have to try to parse everything, and just reject community playlists and profiles.
|
||||||
|
"""
|
||||||
|
|
||||||
|
for renderer in renderer_list:
|
||||||
|
results.extend(parse_renderer(renderer))
|
||||||
|
|
||||||
|
album.add_list_of_other_objects(results)
|
||||||
|
|
||||||
|
return album
|
||||||
|
|
||||||
|
def fetch_song(self, source: Source, stop_at_level: int = 1) -> Song:
|
||||||
|
print(source)
|
||||||
|
return Song()
|
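To make the search flow above easier to follow, here is the POST body that `general_search` assembles, reduced to the fields visible in this diff, with a placeholder standing in for the scraped `INNERTUBE_CONTEXT` and the timing fields (`firstEditTimeMsec`, `lastEditTimeMsec`) left out. The request goes to `/youtubei/v1/search?key=<api key>&prettyPrint=false` on `music.youtube.com`. As a side note, the heartbeat timings quoted in `YoutubeMusicConnection` (96.27, 98.16, 100.04, 101.93, 103.82) differ by roughly 1.8875 min, i.e. about 113.25 s, which matches the `heartbeat_interval=113.25` passed to the connection.

```python
# Sketch of the search request body; innertube_context is a stand-in for the value the
# page scrapes from INNERTUBE_CONTEXT, and the query is an arbitrary example.
import json

search_query = "some artist"
innertube_context = {"client": {}}  # placeholder; the real dict comes from the scraped page

body = {
    "context": {**innertube_context, "adSignalsInfo": {"params": []}},
    "query": search_query,
    "suggestStats": {
        "clientName": "youtube-music",
        "inputMethod": "KEYBOARD",
        "searchMethod": "ENTER_KEY",
        "originalQuery": search_query,
        "parameterValidationStatus": "VALID_PARAMETERS",
        "validationStatus": "VALID",
        "zeroPrefixEnabled": True,
        "availableSuggestions": [],
    },
}
print(json.dumps(body, indent=2))
```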
@ -1,25 +1,33 @@
|
|||||||
from .logging import LOGGING_SECTION
|
from typing import Tuple
|
||||||
from .audio import AUDIO_SECTION
|
|
||||||
from .connection import CONNECTION_SECTION
|
|
||||||
from .misc import MISC_SECTION
|
|
||||||
from .paths import PATHS_SECTION
|
|
||||||
|
|
||||||
from .paths import LOCATIONS
|
|
||||||
from .config import Config
|
from .config import Config
|
||||||
|
from .config_files import (
|
||||||
|
main_config,
|
||||||
|
logging_config,
|
||||||
|
youtube_config,
|
||||||
|
)
|
||||||
|
|
||||||
|
_sections: Tuple[Config, ...] = (
|
||||||
config = Config()
|
main_config.config,
|
||||||
|
logging_config.config,
|
||||||
|
youtube_config.config
|
||||||
|
)
|
||||||
|
|
||||||
def read_config():
|
def read_config():
|
||||||
if not LOCATIONS.CONFIG_FILE.is_file():
|
for section in _sections:
|
||||||
write_config()
|
section.read()
|
||||||
config.read_from_config_file(LOCATIONS.CONFIG_FILE)
|
|
||||||
|
|
||||||
|
# special cases
|
||||||
|
if main_settings['tor']:
|
||||||
|
main_settings['proxies'] = {
|
||||||
|
'http': f'socks5h://127.0.0.1:{main_settings["tor_port"]}',
|
||||||
|
'https': f'socks5h://127.0.0.1:{main_settings["tor_port"]}'
|
||||||
|
}
|
||||||
|
|
||||||
def write_config():
|
def write_config():
|
||||||
config.write_to_config_file(LOCATIONS.CONFIG_FILE)
|
for section in _sections:
|
||||||
|
section.write()
|
||||||
|
|
||||||
set_name_to_value = config.set_name_to_value
|
main_settings: main_config.SettingsStructure = main_config.config.loaded_settings
|
||||||
|
logging_settings: logging_config.SettingsStructure = logging_config.config.loaded_settings
|
||||||
read_config()
|
youtube_settings: youtube_config.SettingsStructure = youtube_config.config.loaded_settings
|
||||||
|
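A small sketch of how the rest of this commit consumes the new settings objects: plain dictionary lookups on the module-level `main_settings`, `youtube_settings` and `logging_settings`. All of the keys below appear in the diffs above; the sketch assumes the package is importable as `music_kraken`.

```python
# Dict-style access to the refactored settings; every key shown here is used above.
from music_kraken.utils.config import main_settings, youtube_settings, logging_settings

chunk_size = main_settings["chunk_size"]            # streaming chunk size for downloads
bitrate = main_settings["bitrate"]                  # minimum acceptable audio bitrate
use_sponsor_block = youtube_settings["use_sponsor_block"]
logger = logging_settings["youtube_music_logger"]   # a logging.Logger instance
```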
132
src/music_kraken/utils/config/attributes/attribute.py
Normal file
@ -0,0 +1,132 @@
|
|||||||
|
import re
|
||||||
|
from typing import Optional, List, Union, Iterable, Callable
|
||||||
|
from dataclasses import dataclass
|
||||||
|
import logging
|
||||||
|
import toml
|
||||||
|
from copy import deepcopy, copy
|
||||||
|
from urllib.parse import urlparse, urlunparse, ParseResult
|
||||||
|
|
||||||
|
from ...exception.config import SettingValueError
|
||||||
|
from ..utils import comment
|
||||||
|
|
||||||
|
|
||||||
|
LOGGER = logging.getLogger("config")
|
||||||
|
|
||||||
|
COMMENT_PREFIX = "#"
|
||||||
|
|
||||||
|
|
||||||
|
def comment_string(uncommented: str) -> str:
|
||||||
|
unprocessed_lines = uncommented.split("\n")
|
||||||
|
|
||||||
|
processed_lines: List[str] = []
|
||||||
|
|
||||||
|
for line in unprocessed_lines:
|
||||||
|
if line.startswith(COMMENT_PREFIX) or line == "":
|
||||||
|
processed_lines.append(line)
|
||||||
|
continue
|
||||||
|
|
||||||
|
line = COMMENT_PREFIX + " " + line
|
||||||
|
processed_lines.append(line)
|
||||||
|
|
||||||
|
return "\n".join(processed_lines)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Description:
|
||||||
|
description: str
|
||||||
|
|
||||||
|
@property
|
||||||
|
def toml_string(self):
|
||||||
|
return comment_string(self.description)
|
||||||
|
|
||||||
|
|
||||||
|
class EmptyLine(Description):
|
||||||
|
def __init__(self):
|
||||||
|
self.description = ""
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class Attribute:
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
name: str,
|
||||||
|
default_value: any,
|
||||||
|
description: Optional[str] = None,
|
||||||
|
):
|
||||||
|
|
||||||
|
self.name = name
|
||||||
|
|
||||||
|
self.value = self._recursive_parse_object(default_value, self.parse_simple_value)
|
||||||
|
|
||||||
|
self.description: Optional[str] = description
|
||||||
|
self.loaded_settings: dict = None
|
||||||
|
|
||||||
|
def initialize_from_config(self, loaded_settings: dict):
|
||||||
|
self.loaded_settings = loaded_settings
|
||||||
|
self.loaded_settings.__setitem__(self.name, self.value, True)
|
||||||
|
|
||||||
|
def unparse_simple_value(self, value: any) -> any:
|
||||||
|
return value
|
||||||
|
|
||||||
|
def parse_simple_value(self, value: any) -> any:
|
||||||
|
return value
|
||||||
|
|
||||||
|
def _recursive_parse_object(self, __object, callback: Callable):
|
||||||
|
__object = copy(__object)
|
||||||
|
|
||||||
|
if isinstance(__object, dict):
|
||||||
|
for key, value in __object.items():
|
||||||
|
__object[key] = self._recursive_parse_object(value, callback)
|
||||||
|
|
||||||
|
return __object
|
||||||
|
|
||||||
|
if isinstance(__object, list) or (isinstance(__object, tuple) and not isinstance(__object, ParseResult)):
|
||||||
|
for i, item in enumerate(__object):
|
||||||
|
__object[i] = self._recursive_parse_object(item, callback)
|
||||||
|
return __object
|
||||||
|
|
||||||
|
return callback(__object)
|
||||||
|
|
||||||
|
def parse(self, unparsed_value):
|
||||||
|
self.value = self._recursive_parse_object(unparsed_value, self.parse_simple_value)
|
||||||
|
return self.value
|
||||||
|
|
||||||
|
def unparse(self, parsed_value):
|
||||||
|
return self._recursive_parse_object(parsed_value, self.unparse_simple_value)
|
||||||
|
|
||||||
|
def load_toml(self, loaded_toml: dict) -> bool:
|
||||||
|
"""
|
||||||
|
returns True if successful
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.name not in loaded_toml:
|
||||||
|
LOGGER.warning(f"No setting by the name {self.name} found in the settings file.")
|
||||||
|
self.loaded_settings.__setitem__(self.name, self.value, True)
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.parse(loaded_toml[self.name])
|
||||||
|
except SettingValueError as settings_error:
|
||||||
|
logging.warning(settings_error)
|
||||||
|
return False
|
||||||
|
|
||||||
|
self.loaded_settings.__setitem__(self.name, self.value, True)
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
@property
|
||||||
|
def toml_string(self) -> str:
|
||||||
|
string = ""
|
||||||
|
|
||||||
|
if self.description is not None:
|
||||||
|
string += comment(self.description) + "\n"
|
||||||
|
|
||||||
|
string += toml.dumps({self.name: self.unparse(self.value)})
|
||||||
|
|
||||||
|
# print(string)
|
||||||
|
return string
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return f"{self.description}\n{self.name}={self.value}"
|
||||||
|
|
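The special attributes that follow all hook into `parse_simple_value` / `unparse_simple_value`. A made-up subclass, purely to illustrate that pattern (it is not part of the codebase):

```python
# Illustrative Attribute subclass; BooleanishAttribute does not exist in the project.
from music_kraken.utils.config.attributes.attribute import Attribute

class BooleanishAttribute(Attribute):
    TRUE_VALUES = {"true", "yes", "1"}

    def parse_simple_value(self, value: any) -> bool:
        # accept a real bool or a loose string form from the toml file
        if isinstance(value, bool):
            return value
        return str(value).strip().lower() in self.TRUE_VALUES

    def unparse_simple_value(self, value: bool) -> str:
        return "true" if value else "false"

verbose = BooleanishAttribute(name="verbose", default_value="yes", description="example flag")
assert verbose.value is True     # parsed on construction
print(verbose.toml_string)       # description rendered as a comment, then: verbose = "true"
```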
151
src/music_kraken/utils/config/attributes/special_attributes.py
Normal file
@ -0,0 +1,151 @@
|
|||||||
|
from pathlib import Path, PosixPath
|
||||||
|
from typing import Optional, Dict, Set
|
||||||
|
from urllib.parse import urlparse, urlunparse
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from .attribute import Attribute
|
||||||
|
from ...exception.config import SettingValueError
|
||||||
|
|
||||||
|
|
||||||
|
class UrlAttribute(Attribute):
|
||||||
|
def parse_simple_value(self, value: any) -> any:
|
||||||
|
return urlparse(value)
|
||||||
|
|
||||||
|
def unparse_simple_value(self, value: any) -> any:
|
||||||
|
return urlunparse((value.scheme, value.netloc, value.path, value.params, value.query, value.fragment))
|
||||||
|
|
||||||
|
|
||||||
|
class PathAttribute(Attribute):
|
||||||
|
def parse_simple_value(self, value: any) -> Path:
|
||||||
|
if isinstance(value, Path) or isinstance(value, PosixPath):
|
||||||
|
return value
|
||||||
|
return Path(value)
|
||||||
|
|
||||||
|
def unparse_simple_value(self, value: Path) -> any:
|
||||||
|
return str(value.resolve())
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class SelectAttribute(Attribute):
|
||||||
|
def __init__(self, name: str, default_value: any, options: tuple, description: Optional[str] = None, ignore_options_for_description = False):
|
||||||
|
self.options: tuple = options
|
||||||
|
|
||||||
|
new_description = ""
|
||||||
|
if description is not None:
|
||||||
|
new_description += description
|
||||||
|
new_description += "\n"
|
||||||
|
|
||||||
|
if not ignore_options_for_description:
|
||||||
|
new_description += f"{{{', '.join(self.options)}}}"
|
||||||
|
|
||||||
|
super().__init__(name, default_value, description)
|
||||||
|
|
||||||
|
def parse_simple_value(self, value: any) -> any:
|
||||||
|
if value in self.options:
|
||||||
|
return value
|
||||||
|
|
||||||
|
raise SettingValueError(
|
||||||
|
setting_name=self.name,
|
||||||
|
setting_value=value,
|
||||||
|
rule=f"has to be in the options: {{{', '.join(self.options)}}}."
|
||||||
|
)
|
||||||
|
|
||||||
|
def unparse_simple_value(self, value: any) -> any:
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
class IntegerSelect(Attribute):
|
||||||
|
def __init__(self, name: str, default_value: any, options: Dict[int, str], description: Optional[str] = None, ignore_options_for_description = False):
|
||||||
|
self.options: Dict[str, int] = options
|
||||||
|
self.option_values: Set[int] = set(self.options.values())
|
||||||
|
|
||||||
|
new_description = ""
|
||||||
|
if description is not None:
|
||||||
|
new_description += description
|
||||||
|
|
||||||
|
description_lines = []
|
||||||
|
|
||||||
|
if description is not None:
|
||||||
|
description_lines.append(description)
|
||||||
|
|
||||||
|
description_lines.append("The values can be either an integer or one of the following values:")
|
||||||
|
|
||||||
|
for number, option in self.options.items():
|
||||||
|
description_lines.append(f"{number}: {option}")
|
||||||
|
|
||||||
|
super().__init__(name, default_value, "\n".join(description_lines))
|
||||||
|
|
||||||
|
def parse_simple_value(self, value: any) -> any:
|
||||||
|
if isinstance(value, str):
|
||||||
|
if value not in self.options:
|
||||||
|
raise SettingValueError(
|
||||||
|
setting_name=self.name,
|
||||||
|
setting_value=value,
|
||||||
|
rule=f"has to be in the options: {{{', '.join(self.options.keys())}}}, if it is a string."
|
||||||
|
)
|
||||||
|
|
||||||
|
return self.options[value]
|
||||||
|
|
||||||
|
return value
|
||||||
|
|
||||||
|
def unparse_simple_value(self, value: int) -> any:
|
||||||
|
if value in self.option_values:
|
||||||
|
for option, v in self.options.items():
|
||||||
|
if v == value:
|
||||||
|
return value
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
ID3_2_FILE_FORMATS = frozenset((
|
||||||
|
"mp3", "mp2", "mp1", # MPEG-1 ID3.2
|
||||||
|
"wav", "wave", "rmi", # RIFF (including WAV) ID3.2
|
||||||
|
"aiff", "aif", "aifc", # AIFF ID3.2
|
||||||
|
"aac", "aacp", # Raw AAC ID3.2
|
||||||
|
"tta", # True Audio ID3.2
|
||||||
|
))
|
||||||
|
_sorted_id3_2_formats = sorted(ID3_2_FILE_FORMATS)
|
||||||
|
|
||||||
|
ID3_1_FILE_FORMATS = frozenset((
|
||||||
|
"ape", # Monkey's Audio ID3.1
|
||||||
|
"mpc", "mpp", "mp+", # MusePack ID3.1
|
||||||
|
"wv", # WavPack ID3.1
|
||||||
|
"ofr", "ofs" # OptimFrog ID3.1
|
||||||
|
))
|
||||||
|
_sorted_id3_1_formats = sorted(ID3_1_FILE_FORMATS)
|
||||||
|
|
||||||
|
|
||||||
|
class AudioFormatAttribute(Attribute):
|
||||||
|
def __init__(self, name: str, default_value: any, description: Optional[str] = None, ignore_options_for_description = False):
|
||||||
|
new_description = ""
|
||||||
|
if description is not None:
|
||||||
|
new_description += description
|
||||||
|
new_description += "\n"
|
||||||
|
|
||||||
|
new_description += f"ID3.2: {{{', '.join(ID3_2_FILE_FORMATS)}}}\n"
|
||||||
|
new_description += f"ID3.1: {{{', '.join(ID3_1_FILE_FORMATS)}}}"
|
||||||
|
|
||||||
|
super().__init__(name, default_value, description)
|
||||||
|
|
||||||
|
def parse_simple_value(self, value: any) -> any:
|
||||||
|
value = value.strip().lower()
|
||||||
|
if value in ID3_2_FILE_FORMATS:
|
||||||
|
return value
|
||||||
|
if value in ID3_1_FILE_FORMATS:
|
||||||
|
logging.debug(f"setting audio format to a format that only supports ID3.1: {v}")
|
||||||
|
return value
|
||||||
|
|
||||||
|
raise SettingValueError(
|
||||||
|
setting_name=self.name,
|
||||||
|
setting_value=value,
|
||||||
|
rule="has to be a valid audio format, supporting id3 metadata"
|
||||||
|
)
|
||||||
|
|
||||||
|
def unparse_simple_value(self, value: any) -> any:
|
||||||
|
return value
|
||||||
|
|
||||||
|
class LoggerAttribute(Attribute):
|
||||||
|
def parse_simple_value(self, value: str) -> logging.Logger:
|
||||||
|
return logging.getLogger(value)
|
||||||
|
|
||||||
|
def unparse_simple_value(self, value: logging.Logger) -> any:
|
||||||
|
return value.name
|
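A minimal sketch of how these attribute classes are meant to round-trip values. The `Attribute` base with `parse_simple_value`/`unparse_simple_value` hooks is taken from the diff above, but its constructor signature (`name`, `default_value`, `description`) is inferred from the `super().__init__` calls and should be treated as an assumption:

```
# Hypothetical usage sketch of the special attributes shown above.
from pathlib import Path

path_attribute = PathAttribute(name="music_directory", default_value="~/Music")
parsed: Path = path_attribute.parse_simple_value("~/Music/metal")  # -> Path("~/Music/metal")
raw: str = path_attribute.unparse_simple_value(parsed)             # -> resolved path as string

select = SelectAttribute(name="audio_backend", default_value="mp3", options=("mp3", "wav"))
select.parse_simple_value("flac")  # raises SettingValueError: "flac" is not in the options
```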
@ -1,234 +0,0 @@
import logging
from dataclasses import dataclass
from typing import Optional, List, Union, Dict

from ..exception.config import SettingNotFound, SettingValueError


LOGGER = logging.getLogger("config")

COMMENT_PREFIX = "#"


def comment_string(uncommented: str) -> str:
    unprocessed_lines = uncommented.split("\n")

    processed_lines: List[str] = []

    for line in unprocessed_lines:
        line: str = line.strip()
        if line.startswith(COMMENT_PREFIX) or line == "":
            processed_lines.append(line)
            continue

        line = COMMENT_PREFIX + " " + line
        processed_lines.append(line)

    return "\n".join(processed_lines)


@dataclass
class Attribute:
    name: str
    description: Optional[str]
    value: Union[str, List[str]]

    def validate(self, value: str):
        """
        This function validates a new value without setting it.

        :raise SettingValueError:
        :param value:
        :return:
        """
        pass

    def set_value(self, value: str):
        """
        :raise SettingValueError: if the value is invalid for this setting
        :param value:
        :return:
        """
        self.validate(value)

        self.value = value

    @property
    def description_as_comment(self):
        return comment_string(self.description)

    @property
    def object_from_value(self):
        return self.value

    def __str__(self):
        return f"{self.description_as_comment}\n{self.name}={self.value}"


class SingleAttribute(Attribute):
    value: str


class StringAttribute(SingleAttribute):
    @property
    def object_from_value(self) -> str:
        return self.value.strip()


class IntAttribute(SingleAttribute):
    def validate(self, value: str):
        if not value.isdigit():
            raise SettingValueError(
                setting_name=self.name,
                setting_value=value,
                rule="has to be a digit (an int)"
            )

    @property
    def object_from_value(self) -> int:
        if self.value.isdigit():
            return int(self.value)


class BoolAttribute(SingleAttribute):
    def validate(self, value: str):
        if value.lower().strip() not in {"true", "false"}:
            raise SettingValueError(
                setting_name=self.name,
                setting_value=value,
                rule="has to be a bool (true/false)"
            )

    @property
    def object_from_value(self) -> bool:
        return self.value.lower().strip() in {"yes", "y", "t", "true"}


class FloatAttribute(SingleAttribute):
    def validate(self, value: str):
        try:
            float(value)
        except ValueError:
            raise SettingValueError(
                setting_name=self.name,
                setting_value=value,
                rule="has to be numeric (an int or float)"
            )

    @property
    def object_from_value(self) -> float:
        return float(self.value)


class ListAttribute(Attribute):
    value: List[str]

    has_default_values: bool = True

    def __len__(self):
        return len(self.value)

    def set_value(self, value: str):
        """
        Due to lists being represented as multiple lines with the same key,
        this appends, rather than setting anything.

        :raise SettingValueError:
        :param value:
        :return:
        """
        self.validate(value)

        # resetting the list to an empty list, if this is the first config line to load
        if self.has_default_values:
            self.value = []
            self.has_default_values = False

        if value in self.value:
            return

        self.value.append(value)

    def __str__(self):
        return f"{self.description_as_comment}\n" + \
               "\n".join(f"{self.name}={element}" for element in self.value)

    def single_object_from_element(self, value: str):
        return value

    @property
    def object_from_value(self) -> list:
        """
        THIS IS NOT THE PROPERTY TO OVERRIDE WHEN INHERITING ListAttribute
        single_object_from_element
        :return:
        """

        parsed = list()
        for raw in self.value:
            parsed.append(self.single_object_from_element(raw))

        return parsed


@dataclass
class Description:
    description: str

    def __str__(self):
        return comment_string(self.description)


class EmptyLine(Description):
    def __init__(self):
        self.description = ""


class Section:
    """
    A placeholder class
    """
    attribute_list: List[Union[
        Attribute,
        Description
    ]]

    def __init__(self):
        self.name_attribute_map: Dict[str, Attribute] = dict()
        self.index_values()

    def __str__(self):
        return "\n".join(attribute.__str__() for attribute in self.attribute_list)

    def index_values(self):
        for element in self.attribute_list:
            if not isinstance(element, Attribute):
                continue

            if element.name in self.name_attribute_map:
                raise ValueError(f"Two different Attributes have the same name: "
                                 f"{self.name_attribute_map[element.name]} {element}")

            self.name_attribute_map[element.name] = element

    def modify_setting(self, setting_name: str, new_value: str):
        """
        :raise SettingValueError, SettingNotFound:
        :param setting_name:
        :param new_value:
        :return:
        """

        if setting_name not in self.name_attribute_map:
            raise SettingNotFound(
                setting_name=setting_name
            )

        self.name_attribute_map[setting_name].set_value(new_value)

    def reset_list_attribute(self):
        for attribute in self.attribute_list:
            if not isinstance(attribute, ListAttribute):
                continue

            attribute.has_default_values = True
@ -1,127 +1,68 @@
-from typing import Union, Tuple, Dict, Iterable, List
-from datetime import datetime
+from typing import Any, Tuple, Union
+from pathlib import Path
 import logging
-import os

-from ..exception.config import SettingNotFound, SettingValueError
-from ..path_manager import LOCATIONS
-from .base_classes import Description, Attribute, Section, EmptyLine, COMMENT_PREFIX
-from .audio import AUDIO_SECTION
-from .logging import LOGGING_SECTION
-from .connection import CONNECTION_SECTION
-from .misc import MISC_SECTION
-from .paths import PATHS_SECTION
+import toml

-LOGGER = logging.getLogger("config")
+from .attributes.attribute import Attribute, Description, EmptyLine


+class ConfigDict(dict):
+    def __init__(self, config_reference: "Config", *args, **kwargs):
+        self.config_reference: Config = config_reference
+
+        super().__init__(*args, **kwargs)
+
+    def __getattribute__(self, __name: str) -> Any:
+        return super().__getattribute__(__name)
+
+    def __setitem__(self, __key: Any, __value: Any, from_attribute: bool = False, is_parsed: bool = False) -> None:
+        if not from_attribute:
+            attribute: Attribute = self.config_reference.attribute_map[__key]
+            if is_parsed:
+                attribute.value = __value
+            else:
+                attribute.parse(__value)
+            self.config_reference.write()
+
+            __value = attribute.value
+
+        return super().__setitem__(__key, __value)
+
+
 class Config:
-    def __init__(self):
-        self.config_elements: Tuple[Union[Description, Attribute, Section], ...] = (
-            Description("IMPORTANT: If you modify this file, the changes for the actual setting, will be kept as is.\n"
-                        "The changes you make to the comments, will be discarded, next time you run music-kraken. "
-                        "Have fun!"),
-            Description(f"Latest reset: {datetime.now()}"),
-            Description("Those are all Settings for the audio codec.\n"
-                        "If you, for some reason wanna fill your drive real quickly, I mean enjoy HIFI music,\n"
-                        "feel free to tinker with the Bitrate or smth. :)"),
-            AUDIO_SECTION,
-            Description("Modify how Music-Kraken connects to the internet:"),
-            CONNECTION_SECTION,
-            Description("Modify all your paths, except your config file..."),
-            PATHS_SECTION,
-            Description("For all your Logging needs.\n"
-                        "If you found a bug, and wan't to report it, please set the Logging level to 0,\n"
-                        "reproduce the bug, and attach the logfile in the bugreport. ^w^"),
-            LOGGING_SECTION,
-            Description("If there are stupid settings, they are here."),
-            MISC_SECTION,
-            Description("🏳️‍⚧️🏳️‍⚧️ Protect trans youth. 🏳️‍⚧️🏳️‍⚧️\n"),
-        )
-
-        self._length = 0
-        self._section_list: List[Section] = []
-        self._name_section_map: Dict[str, Section] = dict()
-
-        for element in self.config_elements:
-            if not isinstance(element, Section):
-                continue
-
-            self._section_list.append(element)
-            for name in element.name_attribute_map:
-                if name in self._name_section_map:
-                    raise ValueError(f"Two sections have the same name: "
-                                     f"{name}: "
-                                     f"{element.__class__.__name__} {self._name_section_map[name].__class__.__name__}")
-
-                self._name_section_map[name] = element
-                self._length += 1
-
-    def set_name_to_value(self, name: str, value: str, silent: bool = True):
-        """
-        :raises SettingValueError, SettingNotFound:
-        :param name:
-        :param value:
-        :return:
-        """
-        if name not in self._name_section_map:
-            if silent:
-                LOGGER.warning(f"The setting \"{name}\" is either deprecated, or doesn't exist.")
-                return
-            raise SettingNotFound(setting_name=name)
-
-        LOGGER.debug(f"setting: {name} value: {value}")
-
-        self._name_section_map[name].modify_setting(setting_name=name, new_value=value)
-
-    def __len__(self):
-        return self._length
+    def __init__(self, componet_list: Tuple[Union[Attribute, Description, EmptyLine]], config_file: Path) -> None:
+        self.config_file: Path = config_file
+
+        self.component_list: Tuple[Union[Attribute, Description, EmptyLine]] = componet_list
+        self.loaded_settings: ConfigDict = ConfigDict(self)
+
+        self.attribute_map = {}
+        for component in self.component_list:
+            if not isinstance(component, Attribute):
+                continue
+
+            component.initialize_from_config(self.loaded_settings)
+            self.attribute_map[component.name] = component

     @property
-    def config_string(self) -> str:
-        return "\n\n".join(str(element) for element in self.config_elements)
+    def toml_string(self):
+        return "\n".join(component.toml_string for component in self.component_list)

-    def _parse_conf_line(self, line: str, index: int):
-        """
-        :raises SettingValueError, SettingNotFound:
-        :param line:
-        :param index:
-        :return:
-        """
-        line = line.strip()
-        if line.startswith(COMMENT_PREFIX):
-            return
-
-        if line == "":
-            return
-
-        if "=" not in line:
-            """
-            TODO
-            No value error but custom conf error
-            """
-            raise ValueError(f"Couldn't find the '=' in line {index}.")
-
-        line_segments = line.split("=")
-        name = line_segments[0]
-        value = "=".join(line_segments[1:])
-
-        self.set_name_to_value(name, value)
-
-    def read_from_config_file(self, path: os.PathLike):
-        with open(path, "r", encoding=LOCATIONS.FILE_ENCODING) as conf_file:
-            for section in self._section_list:
-                section.reset_list_attribute()
-
-            for i, line in enumerate(conf_file):
-                self._parse_conf_line(line, i+1)
-
-    def write_to_config_file(self, path: os.PathLike):
-        with open(path, "w", encoding=LOCATIONS.FILE_ENCODING) as conf_file:
-            conf_file.write(self.config_string)
-
-    def __iter__(self) -> Iterable[Attribute]:
-        for section in self._section_list:
-            for name, attribute in section.name_attribute_map.items():
-                yield attribute
+    def write(self):
+        with self.config_file.open("w") as conf_file:
+            conf_file.write(self.toml_string)
+
+    def read(self):
+        if not self.config_file.is_file():
+            logging.info(f"Config file at '{self.config_file}' doesn't exist => generating")
+            self.write()
+            return
+
+        toml_data = {}
+        with self.config_file.open("r") as conf_file:
+            toml_data = toml.load(conf_file)
+
+        for component in self.component_list:
+            if isinstance(component, Attribute):
+                component.load_toml(toml_data)
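A sketch of the intended round-trip through the new `Config`/`ConfigDict` pair. The `parse`, `initialize_from_config`, `toml_string`, and `load_toml` members are taken from the hunk above; everything else (the file path, the single-attribute config) is an assumption for illustration:

```
# Hypothetical round-trip based on Config.write()/read() and ConfigDict.__setitem__ above.
from pathlib import Path

config_file = Path("/tmp/example.toml")  # assumed location for this sketch
config = Config([Attribute(name="bitrate", default_value=125)], config_file)

config.write()                            # serializes every component's toml_string to disk
config.loaded_settings["bitrate"] = 320   # ConfigDict parses the value and persists the file
config.read()                             # reloads the file via toml.load()

print(config.loaded_settings["bitrate"])  # -> 320
```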
99
src/music_kraken/utils/config/config_files/logging_config.py
Normal file
@ -0,0 +1,99 @@
from typing import TypedDict, List
from urllib.parse import ParseResult
from logging import Logger
from pathlib import Path
import logging

from ...path_manager import LOCATIONS
from ..config import Config
from ..attributes.attribute import Attribute, EmptyLine
from ..attributes.special_attributes import (
    IntegerSelect,
    LoggerAttribute
)


config = Config([
    Attribute(name="logging_format", default_value="%(levelname)s:%(name)s:%(message)s", description="""Logging settings for the actual logging:
Reference for the logging formats: https://docs.python.org/3/library/logging.html#logrecord-attributes"""),
    IntegerSelect(
        name="log_level",
        default_value=logging.INFO,
        options={
            "CRITICAL": 50,
            "ERROR": 40,
            "WARNING": 30,
            "INFO": 20,
            "DEBUG": 10,
            "NOTSET": 0
        }
    ),

    LoggerAttribute(
        name="download_logger",
        description="The logger for downloading.",
        default_value="download"
    ),
    LoggerAttribute(
        name="tagging_logger",
        description="The logger for tagging id3 containers.",
        default_value="tagging"
    ),
    LoggerAttribute(
        name="codex_logger",
        description="The logger for streaming the audio into a uniform codex.",
        default_value="codex"
    ),
    LoggerAttribute(
        name="object_logger",
        description="The logger for creating Data-Objects.",
        default_value="object"
    ),
    LoggerAttribute(
        name="database_logger",
        description="The logger for Database operations.",
        default_value="database"
    ),
    LoggerAttribute(
        name="musify_logger",
        description="The logger for the musify scraper.",
        default_value="musify"
    ),
    LoggerAttribute(
        name="youtube_logger",
        description="The logger for the youtube scraper.",
        default_value="youtube"
    ),
    LoggerAttribute(
        name="youtube_music_logger",
        description="The logger for the youtube music scraper.\n(The scraper is separate from the youtube scraper)",
        default_value="youtube_music"
    ),
    LoggerAttribute(
        name="metal_archives_logger",
        description="The logger for the metal archives scraper.",
        default_value="metal_archives"
    ),
    LoggerAttribute(
        name="genius_logger",
        description="The logger for the genius scraper.",
        default_value="genius"
    ),

], LOCATIONS.get_config_file("logging"))


class SettingsStructure(TypedDict):
    # logging
    logging_format: str
    log_level: int
    download_logger: Logger
    tagging_logger: Logger
    codex_logger: Logger
    object_logger: Logger
    database_logger: Logger
    musify_logger: Logger
    youtube_logger: Logger
    youtube_music_logger: Logger
    metal_archives_logger: Logger
    genius_logger: Logger
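How these logger attributes are presumably consumed elsewhere in the codebase. The `logging_settings` name mirrors the alias that shows up in the `DownloadResult` hunk further down; treating it as `config.loaded_settings` here is an assumption:

```
# Hypothetical access pattern; logging_settings is assumed to be config.loaded_settings.
logging_settings = config.loaded_settings

download_logger = logging_settings["download_logger"]    # a logging.Logger, parsed by LoggerAttribute
download_logger.setLevel(logging_settings["log_level"])  # an int, parsed by IntegerSelect
download_logger.info("download started")
```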
145
src/music_kraken/utils/config/config_files/main_config.py
Normal file
@ -0,0 +1,145 @@
from typing import TypedDict, List
from datetime import datetime
from urllib.parse import ParseResult
from logging import Logger
from pathlib import Path

from ...path_manager import LOCATIONS
from ..config import Config
from ..attributes.attribute import Attribute, EmptyLine, Description
from ..attributes.special_attributes import (
    SelectAttribute,
    PathAttribute,
    AudioFormatAttribute,
)

config = Config([
    Description(f"""IMPORTANT: If you modify this file, the changes to the actual settings will be kept as is.
The changes you make to the comments will be discarded the next time you run music-kraken. Have fun!

Latest reset: {datetime.now()}

  _____
 / ____|
| |  __   __ _  _   _
| | |_ | / _` || | | |
| |__| || (_| || |_| |
 \_____| \__,_| \__, |
                 __/ |
                |___/
"""),

    Attribute(name="hasnt_yet_started", default_value=False, description="This will be set automatically, to look if it needs to run the scripts that run on start."),
    Attribute(name="bitrate", default_value=125, description="Streams the audio with the given bitrate [kB/s]. It can't stream with a higher bitrate than the audio source provides."),
    AudioFormatAttribute(name="audio_format", default_value="mp3", description="""Music Kraken will stream the audio into this format.
You can use audio formats which support ID3.2 and ID3.1,
but you will have cleaner metadata using ID3.2."""),

    Attribute(name="result_history", default_value=False, description="""If enabled, you can go back to the previous results.
The consequence is a higher memory consumption, because every result is saved."""),
    Attribute(name="history_length", default_value=8, description="""You can choose how far back you can go in the result history.
The further you choose to be able to go back, the higher the memory usage.
'-1' removes the limit entirely."""),

    EmptyLine(),

    Attribute(name="sort_by_date", default_value=True, description="If this is set to true, it will set the albumsort attribute such that\nthe albums are sorted by date."),
    Attribute(name="sort_album_by_type", default_value=True, description="""If this is set to true, it will set the albumsort attribute such that
the albums are put into categories before being sorted.
This means, for example, that Studio Albums and EPs are always in front of Singles, and Compilations are in the back."""),
    Attribute(name="download_path", default_value="{genre}/{artist}/{album}", description="""There are multiple fields you can use for the path and file name:
- genre
- label
- artist
- album
- song
- album_type
The folder music kraken should put the songs into."""),
    Attribute(name="download_file", default_value="{song}.{audio_format}", description="The filename of the audio file."),
    SelectAttribute(name="album_type_blacklist", default_value=[
        "Compilation Album",
        "Live Album",
        "Mixtape"
    ], options=("Studio Album", "EP (Extended Play)", "Single", "Live Album", "Compilation Album", "Mixtape", "Demo", "Other"), description="""Music Kraken ignores all albums of those types.
The following album types exist in the program:"""),

    EmptyLine(),

    Attribute(name="proxies", default_value=[], description="This is a dictionary."),
    Attribute(name="tor", default_value=False, description="""Route ALL traffic through Tor.
If you use Tor, make sure the Tor browser is installed and running. I can't guarantee maximum security though!"""),
    Attribute(name="tor_port", default_value=9150, description="The port Tor is listening on. If Tor is already working, don't change it."),

    Attribute(name="chunk_size", default_value=1024, description="Size of the chunks that are streamed.\nHere could be some room for improvement."),
    Attribute(name="show_download_errors_threshold", default_value=0.3, description="""If the percentage of failed downloads goes over this threshold,
all the error messages are shown."""),

    EmptyLine(),

    PathAttribute(name="music_directory", default_value=LOCATIONS.MUSIC_DIRECTORY.resolve(), description="The directory all the music will be downloaded to."),
    PathAttribute(name="temp_directory", default_value=LOCATIONS.TEMP_DIRECTORY.resolve(), description="All temporary stuff is gonna be dumped in this directory."),
    PathAttribute(name="log_file", default_value=LOCATIONS.get_log_file("download_logs.log").resolve()),
    PathAttribute(name="ffmpeg_binary", default_value=LOCATIONS.FFMPEG_BIN.resolve(), description="Set the path to the ffmpeg binary."),
    Attribute(
        name="not_a_genre_regex",
        description="These regular expressions tell music-kraken which sub-folders of the music-directory\n"
                    "it should ignore and not count as genres.",
        default_value=[
            r'^\.'  # is hidden/starts with a "."
        ]
    ),

    EmptyLine(),

    Attribute(name="happy_messages", default_value=[
        "Support the artist.",
        "Star Me: https://github.com/HeIIow2/music-downloader",
        "🏳️‍⚧️🏳️‍⚧️ Trans rights are human rights. 🏳️‍⚧️🏳️‍⚧️",
        "🏳️‍⚧️🏳️‍⚧️ Trans women are women, trans men are men, and enbies are enbies. 🏳️‍⚧️🏳️‍⚧️",
        "🏴‍☠️🏴‍☠️ Unite under one flag, fck borders. 🏴‍☠️🏴‍☠️",
        "Join my Matrix Space: https://matrix.to/#/#music-kraken:matrix.org",
        "BPJM does censorship.",
        "🏳️‍⚧️🏳️‍⚧️ Protect trans youth. 🏳️‍⚧️🏳️‍⚧️",
        "Klassenkampf",
        "Rise Proletarians!!"
    ], description="""Just some nice and wholesome messages.
If your mindset has traits of a [file corruption], you might not agree.
But anyways... Freedom of thought, so go ahead and change the messages."""),
    Attribute(name="modify_gc", default_value=True),
    Attribute(name="id_bits", default_value=64, description="I really dunno why I even made this a setting.. Modifying this is a REALLY dumb idea."),
    Description("🏳️‍⚧️🏳️‍⚧️ Protect trans youth. 🏳️‍⚧️🏳️‍⚧️\n"),

], LOCATIONS.get_config_file("main"))


class SettingsStructure(TypedDict):
    hasnt_yet_started: bool
    result_history: bool
    history_length: int
    happy_messages: List[str]
    modify_gc: bool
    id_bits: int

    # audio
    bitrate: int
    audio_format: str
    sort_by_date: bool
    sort_album_by_type: bool
    download_path: str
    download_file: str
    album_type_blacklist: List[str]

    # connection
    proxies: List[dict[str, str]]
    tor: bool
    tor_port: int
    chunk_size: int
    show_download_errors_threshold: float

    # paths
    music_directory: Path
    temp_directory: Path
    log_file: Path
    not_a_genre_regex: List[str]
    ffmpeg_binary: Path
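For illustration, a single attribute from this file would presumably serialize to a commented description followed by a `toml.dumps` of its name/value pair, matching the `toml_string` code earlier in this diff. A minimal sketch:

```
# Hypothetical rendering of one attribute, following comment() + toml.dumps().
import toml

description = "Streams the audio with the given bitrate [kB/s]."
print("\n".join("# " + line for line in description.split("\n")))
print(toml.dumps({"bitrate": 125}), end="")
# Output:
# # Streams the audio with the given bitrate [kB/s].
# bitrate = 125
```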
103
src/music_kraken/utils/config/config_files/youtube_config.py
Normal file
@ -0,0 +1,103 @@
from typing import TypedDict, List
from urllib.parse import ParseResult
from logging import Logger
from pathlib import Path

from ...path_manager import LOCATIONS
from ..config import Config
from ..attributes.attribute import Attribute
from ..attributes.special_attributes import SelectAttribute, PathAttribute, UrlAttribute


config = Config([
    UrlAttribute(name="invidious_instance", default_value="https://yt.artemislena.eu", description="""This is an attribute where you can define the invidious instance
the youtube downloader should use.
Here is a list of active ones: https://docs.invidious.io/instances/
Instances that use cloudflare or have source code changes could cause issues.
Hidden instances (.onion) will only work when setting 'tor=true'."""),
    UrlAttribute(name="piped_instance", default_value="https://piped-api.privacy.com.de", description="""This is an attribute where you can define the piped instance
the youtube downloader should use.
Here is a list of active ones: https://github.com/TeamPiped/Piped/wiki/Instances
Instances that use cloudflare or have source code changes could cause issues.
Hidden instances (.onion) will only work when setting 'tor=true'."""),
    Attribute(name="sleep_after_youtube_403", default_value=30, description="The time to wait after youtube returned 403 (in seconds)"),
    Attribute(name="youtube_music_api_key", default_value="AIzaSyC9XL3ZjWddXya6X74dJoCTL-WEYFDNX30", description="""This is the API key used by YouTube-Music internally.
Dw. if it is empty, Rachel will fetch it automatically for you <333
(she will also update outdated api keys/those that don't work)"""),
    Attribute(name="youtube_music_clean_data", default_value=True, description="If set to true, it exclusively fetches artists/albums/songs, not things like user channels etc."),
    UrlAttribute(name="youtube_url", default_value=[
        "https://www.youtube.com/",
        "https://www.youtu.be/"
    ], description="""This is used to detect if an url is from youtube or any alternative frontend.
If any instance seems to be missing, run music kraken with the -f flag."""),
    Attribute(name="use_sponsor_block", default_value=True, description="Use SponsorBlock to remove ads or similar segments from the youtube videos."),

    Attribute(name="youtube_music_consent_cookies", default_value={
        "CONSENT": "PENDING+258"
    }, description="The cookie with the key CONSENT says to what stuff you agree. Per default you decline all cookies, but it honestly doesn't matter."),

    Attribute(name="youtube_music_innertube_context", default_value={
        "client": {
            "hl": "en",
            "gl": "DE",
            "remoteHost": "87.123.241.77",
            "deviceMake": "",
            "deviceModel": "",
            "visitorData": "CgtiTUxaTHpoXzk1Zyia59WlBg%3D%3D",
            "userAgent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36",
            "clientName": "WEB_REMIX",
            "clientVersion": "1.20230710.01.00",
            "osName": "X11",
            "osVersion": "",
            "originalUrl": "https://music.youtube.com/",
            "platform": "DESKTOP",
            "clientFormFactor": "UNKNOWN_FORM_FACTOR",
            "configInfo": {
                "appInstallData": "",
                "coldConfigData": "",
                "coldHashData": "",
                "hotHashData": ""
            },
            "userInterfaceTheme": "USER_INTERFACE_THEME_DARK",
            "timeZone": "Atlantic/Jan_Mayen",
            "browserName": "Firefox",
            "browserVersion": "115.0",
            "acceptHeader": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
            "deviceExperimentId": "ChxOekkxTmpnek16UTRNVFl4TkRrek1ETTVOdz09EJrn1aUGGJrn1aUG",
            "screenWidthPoints": 584,
            "screenHeightPoints": 939,
            "screenPixelDensity": 1,
            "screenDensityFloat": 1,
            "utcOffsetMinutes": 120,
            "musicAppInfo": {
                "pwaInstallabilityStatus": "PWA_INSTALLABILITY_STATUS_UNKNOWN",
                "webDisplayMode": "WEB_DISPLAY_MODE_BROWSER",
                "storeDigitalGoodsApiSupportStatus": {
                    "playStoreDigitalGoodsApiSupportStatus": "DIGITAL_GOODS_API_SUPPORT_STATUS_UNSUPPORTED"
                }
            }
        },
        "user": {"lockedSafetyMode": False},
        "request": {
            "useSsl": True,
            "internalExperimentFlags": [],
            "consistencyTokenJars": []
        },
        "adSignalsInfo": {
            "params": []
        }
    }, description="Don't bother about this. It is something technical, but if you wanna change the innertube requests... go on.")
], LOCATIONS.get_config_file("youtube"))


class SettingsStructure(TypedDict):
    # youtube
    invidious_instance: ParseResult
    piped_instance: ParseResult
    sleep_after_youtube_403: float
    youtube_music_api_key: str
    youtube_music_clean_data: bool
    youtube_url: List[ParseResult]
    use_sponsor_block: bool
    youtube_music_innertube_context: dict
    youtube_music_consent_cookies: dict
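A sketch of how these values could be fed into a request against YouTube Music's internal ("innertube") API. The settings keys come from the file above; the endpoint path, the query string, and the `youtube_settings` variable are assumptions for illustration, not taken from this diff:

```
# Hypothetical request sketch; endpoint and variable names are assumptions.
import requests

youtube_settings = config.loaded_settings

session = requests.Session()
session.cookies.update(youtube_settings["youtube_music_consent_cookies"])

response = session.post(
    "https://music.youtube.com/youtubei/v1/search",  # standard innertube search endpoint
    params={"key": youtube_settings["youtube_music_api_key"], "prettyPrint": "false"},
    json={
        "context": youtube_settings["youtube_music_innertube_context"],
        "query": "some artist",
    },
)
print(response.status_code)
```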
0
src/music_kraken/utils/config/sections/__init__.py
Normal file
@ -1,6 +1,6 @@
 import logging

-from .base_classes import (
+from ..base_classes import (
     SingleAttribute,
     FloatAttribute,
     StringAttribute,
@ -10,8 +10,8 @@ from .base_classes import (
     BoolAttribute,
     ListAttribute
 )
-from ...utils.enums.album import AlbumType
-from ...utils.exception.config import SettingValueError
+from ...enums.album import AlbumType
+from ...exception.config import SettingValueError

 # Only the formats with id3 metadata can be used
 # https://www.audioranger.com/audio-formats.php
@ -1,9 +1,9 @@
 from urllib.parse import urlparse, ParseResult
 import re

-from .base_classes import Section, FloatAttribute, IntAttribute, BoolAttribute, ListAttribute, StringAttribute
-from ..regex import URL_PATTERN
-from ..exception.config import SettingValueError
+from ..base_classes import Section, FloatAttribute, IntAttribute, BoolAttribute, ListAttribute, StringAttribute
+from ...regex import URL_PATTERN
+from ...exception.config import SettingValueError


 class ProxAttribute(ListAttribute):
@ -107,6 +107,18 @@ class ConnectionSection(Section):
             value="20"
         )

+        self.YOUTUBE_MUSIC_API_KEY = StringAttribute(
+            name="youtube_music_api_key",
+            description="This is the API key used by YouTube-Music internally.\nDw. if it is empty, Rachel will fetch it automatically for you <333\n(she will also update outdated api keys/those that don't work)",
+            value="AIzaSyC9XL3ZjWddXya6X74dJoCTL-WEYFDNX30"
+        )
+
+        self.YOUTUBE_MUSIC_CLEAN_DATA = BoolAttribute(
+            name="youtube_music_clean_data",
+            description="If set to true, it exclusively fetches artists/albums/songs, not things like user channels etc.",
+            value="true"
+        )
+
         self.ALL_YOUTUBE_URLS = UrlListAttribute(
             name="youtube_url",
             description="This is used to detect, if an url is from youtube, or any alternativ frontend.\n"
@ -133,6 +145,8 @@ class ConnectionSection(Section):
             self.INVIDIOUS_INSTANCE,
             self.PIPED_INSTANCE,
             self.SLEEP_AFTER_YOUTUBE_403,
+            self.YOUTUBE_MUSIC_API_KEY,
+            self.YOUTUBE_MUSIC_CLEAN_DATA,
             self.ALL_YOUTUBE_URLS,
             self.SPONSOR_BLOCK
         ]
@ -1,7 +1,7 @@
 import logging
 from typing import Callable

-from .base_classes import SingleAttribute, StringAttribute, Section, Description, EmptyLine
+from ..base_classes import SingleAttribute, StringAttribute, Section, Description, EmptyLine

 LOG_LEVELS = {
     "CRITICAL": 50,
@ -90,6 +90,11 @@ class LoggingSection(Section):
             description="The logger for the youtube scraper.",
             value="youtube"
         )
+        self.YOUTUBE_MUSIC_LOGGER = LoggerAttribute(
+            name="youtube_music_logger",
+            description="The logger for the youtube music scraper.\n(The scraper is seperate to the youtube scraper)",
+            value="youtube_music"
+        )
         self.ENCYCLOPAEDIA_METALLUM_LOGGER = LoggerAttribute(
             name="metal_archives_logger",
             description="The logger for the metal archives scraper.",
@ -114,6 +119,7 @@ class LoggingSection(Section):
             self.DATABASE_LOGGER,
             self.MUSIFY_LOGGER,
             self.YOUTUBE_LOGGER,
+            self.YOUTUBE_MUSIC_LOGGER,
             self.ENCYCLOPAEDIA_METALLUM_LOGGER,
             self.GENIUS_LOGGER
         ]
@ -1,4 +1,4 @@
-from .base_classes import Section, IntAttribute, ListAttribute, BoolAttribute
+from ..base_classes import Section, IntAttribute, ListAttribute, BoolAttribute


 class MiscSection(Section):
@ -1,7 +1,7 @@
 from pathlib import Path

-from ..path_manager import LOCATIONS
-from .base_classes import Section, StringAttribute, ListAttribute
+from ...path_manager import LOCATIONS
+from ..base_classes import Section, StringAttribute, ListAttribute


 class PathAttribute(StringAttribute):
61
src/music_kraken/utils/config/settings.py
Normal file
@ -0,0 +1,61 @@
from typing import TypedDict, List

from urllib.parse import ParseResult
from logging import Logger
from pathlib import Path


class SettingsStructure(TypedDict):
    hasnt_yet_started: bool
    result_history: bool
    history_length: int
    happy_messages: List[str]
    modify_gc: bool
    id_bits: int

    # audio
    bitrate: int
    audio_format: str
    sort_by_date: bool
    sort_album_by_type: bool
    download_path: str
    download_file: str
    album_type_blacklist: List[str]

    # connection
    proxies: List[str]
    tor: bool
    tor_port: int
    chunk_size: int
    show_download_errors_threshold: float

    # youtube
    invidious_instance: ParseResult
    piped_instance: ParseResult
    sleep_after_youtube_403: float
    youtube_music_api_key: str
    youtube_music_clean_data: bool
    youtube_url: List[ParseResult]
    use_sponsor_block: bool

    # logging
    logging_format: str
    log_level: int
    download_logger: Logger
    tagging_logger: Logger
    codex_logger: Logger
    object_logger: Logger
    database_logger: Logger
    musify_logger: Logger
    youtube_logger: Logger
    youtube_music_logger: Logger
    metal_archives_logger: Logger
    genius_logger: Logger

    # paths
    music_directory: Path
    temp_directory: Path
    log_file: Path
    not_a_genre_regex: List[str]
    ffmpeg_binary: Path
4
src/music_kraken/utils/config/utils.py
Normal file
@ -0,0 +1,4 @@
def comment(uncommented_string: str) -> str:
    _fragments = uncommented_string.split("\n")
    _fragments = ["# " + frag for frag in _fragments]
    return "\n".join(_fragments)
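For clarity, `comment()` simply prefixes every line of its input; a quick sketch of its output:

```
# Example of what utils.comment() produces.
print(comment("first line\nsecond line"))
# Output:
# # first line
# # second line
```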
18
src/music_kraken/utils/debug_utils.py
Normal file
@ -0,0 +1,18 @@
from pathlib import Path
import json

from .path_manager import LOCATIONS


def dump_to_file(file_name: str, payload: str, is_json: bool = False, exit_after_dump: bool = True):
    path = Path(LOCATIONS.TEMP_DIRECTORY, file_name)
    print(f"Dumping payload to: \"{path}\"")

    if is_json:
        payload = json.dumps(json.loads(payload), indent=4)

    with path.open("w") as f:
        f.write(payload)

    if exit_after_dump:
        exit()
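A typical, hypothetical call site for this debug helper (the `response` object is assumed to be an HTTP response from one of the scrapers):

```
# Hypothetical usage: dump a raw API response for inspection, then stop the program.
dump_to_file("search-response.json", response.text, is_json=True, exit_after_dump=True)
```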
@ -11,6 +11,7 @@ class SourceTypes(Enum):
 class SourcePages(Enum):
     YOUTUBE = "youtube"
     MUSIFY = "musify"
+    YOUTUBE_MUSIC = "youtube music"
     GENIUS = "genius"
     MUSICBRAINZ = "musicbrainz"
     ENCYCLOPAEDIA_METALLUM = "encyclopaedia metallum"
@ -1,4 +1,11 @@
 import os
+from datetime import datetime
+

 def clear_console():
     os.system('cls' if os.name in ('nt', 'dos') else 'clear')
+
+
+def get_current_millis() -> int:
+    dt = datetime.now()
+    return int(dt.microsecond / 1_000)
@ -20,8 +20,12 @@ class Locations:
         self.CONFIG_DIRECTORY = get_config_directory(str(application_name))
         self.CONFIG_DIRECTORY.mkdir(exist_ok=True, parents=True)
         self.CONFIG_FILE = Path(self.CONFIG_DIRECTORY, f"{application_name}.conf")
+        self.LEGACY_CONFIG_FILE = Path(self.CONFIG_DIRECTORY, f"{application_name}.conf")

         self.FFMPEG_BIN = Path(FFmpeg(enable_log=False).get_ffmpeg_bin())

+    def get_config_file(self, config_name: str) -> Path:
+        return Path(self.CONFIG_DIRECTORY, f"{config_name}.toml")
+
     def get_log_file(self, file_name: os.PathLike) -> Path:
         return Path(self.TEMP_DIRECTORY, file_name)
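With this helper, each config module above resolves to its own TOML file. Assuming the config directory is `~/.config/music-kraken` (platform dependent), the resolution would look roughly like this:

```
# Hypothetical result; the exact directory depends on the platform.
LOCATIONS.get_config_file("youtube")  # -> ~/.config/music-kraken/youtube.toml
LOCATIONS.get_config_file("main")     # -> ~/.config/music-kraken/main.toml
```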
@ -1,2 +1,3 @@
-URL_PATTERN = 'https?://(?:[-\w.]|(?:%[\da-fA-F]{2}))+'
+URL_PATTERN = r"https?://(?:[-\w.]|(?:%[\da-fA-F]{2}))+"
+INT_PATTERN = r"^\d*$"
+FLOAT_PATTERN = r"^[\d|\,|\.]*$"
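These patterns are presumably used to validate raw config strings before parsing; a quick sketch of how they behave:

```
# Illustrative checks against the patterns defined above.
import re

print(re.fullmatch(INT_PATTERN, "1024") is not None)                    # True
print(re.fullmatch(FLOAT_PATTERN, "0.3") is not None)                   # True
print(re.match(URL_PATTERN, "https://yt.artemislena.eu") is not None)   # True
```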
@ -1,110 +1,20 @@
-import logging
 import random
-from pathlib import Path
-from typing import List, Tuple, Set, Dict
-from urllib.parse import ParseResult

-from .path_manager import LOCATIONS
-from .config import LOGGING_SECTION, AUDIO_SECTION, CONNECTION_SECTION, MISC_SECTION, PATHS_SECTION
-from .enums.album import AlbumType
+from .config import main_settings

-CONFIG_FILE = LOCATIONS.CONFIG_FILE
-
-# modifies the garbage collector to speed up the program
-# https://mkennedy.codes/posts/python-gc-settings-change-this-and-make-your-app-go-20pc-faster/
-# https://web.archive.org/web/20221124122222/https://mkennedy.codes/posts/python-gc-settings-change-this-and-make-your-app-go-20pc-faster/
-MODIFY_GC: bool = MISC_SECTION.MODIFY_GC.object_from_value
-
-ID_BITS: int = MISC_SECTION.ID_BITS.object_from_value
-ID_RANGE: Tuple[int, int] = (0, int(2 ** ID_BITS))
-
-"""
-I will now and then use those messages in the programm.
-But I won't overuse them dw.
-
-I will keep those messages, if you disagree with me on the messages,
-feel free to fork the programm and edit them, or just edit them in the config
-file once I implemented it. (I did it is in ~/.config/music-kraken/music-kraken.conf)
-"""
-HAPPY_MESSAGES: List[str] = MISC_SECTION.HAPPY_MESSAGES.object_from_value
+DEBUG = False
+DEBUG_YOUTUBE_INITIALIZING = DEBUG and False
+DEBUG_PAGES = DEBUG and False

+if DEBUG:
+    print("DEBUG ACTIVE")
+

 def get_random_message() -> str:
-    return random.choice(HAPPY_MESSAGES)
+    return random.choice(main_settings['happy_messages'])


-TEMP_DIR = PATHS_SECTION.TEMP_DIRECTORY.object_from_value
-LOG_PATH = PATHS_SECTION.LOG_PATH.object_from_value
-MUSIC_DIR: Path = PATHS_SECTION.MUSIC_DIRECTORY.object_from_value
-
-NOT_A_GENRE_REGEX: Tuple[str] = PATHS_SECTION.NOT_A_GENRE_REGEX.object_from_value
-
-# configure logger default
-logging.basicConfig(
-    level=LOGGING_SECTION.LOG_LEVEL.object_from_value,
-    format=LOGGING_SECTION.FORMAT.object_from_value,
-    handlers=[
-        logging.FileHandler(LOG_PATH),
-        logging.StreamHandler()
-    ]
-)
-
-OBJECT_LOGGER = LOGGING_SECTION.OBJECT_LOGGER.object_from_value
-DATABASE_LOGGER = LOGGING_SECTION.DATABASE_LOGGER.object_from_value
-
-YOUTUBE_LOGGER = LOGGING_SECTION.YOUTUBE_LOGGER.object_from_value
-MUSIFY_LOGGER = LOGGING_SECTION.MUSIFY_LOGGER.object_from_value
-GENIUS_LOGGER = LOGGING_SECTION.GENIUS_LOGGER
-ENCYCLOPAEDIA_METALLUM_LOGGER = LOGGING_SECTION.ENCYCLOPAEDIA_METALLUM_LOGGER.object_from_value
-
-DOWNLOAD_LOGGER = LOGGING_SECTION.DOWNLOAD_LOGGER.object_from_value
-TAGGING_LOGGER = LOGGING_SECTION.TAGGING_LOGGER.object_from_value
-CODEX_LOGGER = LOGGING_SECTION.CODEX_LOGGER.object_from_value
-
-# kB per second
-BITRATE = AUDIO_SECTION.BITRATE.object_from_value
-AUDIO_FORMAT = AUDIO_SECTION.AUDIO_FORMAT.object_from_value
-
-DOWNLOAD_PATH = AUDIO_SECTION.DOWNLOAD_PATH.object_from_value
-DOWNLOAD_FILE = AUDIO_SECTION.DOWNLOAD_FILE.object_from_value
-
-TOR: bool = CONNECTION_SECTION.USE_TOR.object_from_value
-PROXIES_LIST: List[Dict[str, str]] = CONNECTION_SECTION.PROXIES.object_from_value
-proxies = {}
-if len(CONNECTION_SECTION.PROXIES) > 0:
-    """
-    TODO
-    rotating proxies
-    """
-    proxies = CONNECTION_SECTION.PROXIES.object_from_value[0]
-if TOR:
-    proxies = {
-        'http': f'socks5h://127.0.0.1:{CONNECTION_SECTION.TOR_PORT.object_from_value}',
-        'https': f'socks5h://127.0.0.1:{CONNECTION_SECTION.TOR_PORT.object_from_value}'
-    }
-INVIDIOUS_INSTANCE: ParseResult = CONNECTION_SECTION.INVIDIOUS_INSTANCE.object_from_value
-PIPED_INSTANCE: ParseResult = CONNECTION_SECTION.PIPED_INSTANCE.object_from_value
-
-ALL_YOUTUBE_URLS: List[ParseResult] = CONNECTION_SECTION.ALL_YOUTUBE_URLS.object_from_value
-ENABLE_SPONSOR_BLOCK: bool = CONNECTION_SECTION.SPONSOR_BLOCK.object_from_value
-
-# size of the chunks that are streamed
-CHUNK_SIZE = CONNECTION_SECTION.CHUNK_SIZE.object_from_value
-# this is a percentage describing the percentage of failed downloads,
-# relative to the total downloads.
-# If the percentage goes over this threshold DownloadResult returns the download errors
-# in the __str__ method
-SHOW_DOWNLOAD_ERRORS_THRESHOLD = CONNECTION_SECTION.SHOW_DOWNLOAD_ERRORS_THRESHOLD.object_from_value
-
-SORT_BY_DATE = AUDIO_SECTION.SORT_BY_DATE.object_from_value
-SORT_BY_ALBUM_TYPE = AUDIO_SECTION.SORT_BY_ALBUM_TYPE.object_from_value
-
-ALBUM_TYPE_BLACKLIST: Set[AlbumType] = set(AUDIO_SECTION.ALBUM_TYPE_BLACKLIST.object_from_value)
-
-THREADED = False
-
-ENABLE_RESULT_HISTORY: bool = MISC_SECTION.ENABLE_RESULT_HISTORY.object_from_value
-HISTORY_LENGTH: int = MISC_SECTION.HISTORY_LENGTH.object_from_value
+HIGHEST_ID = 2**main_settings['id_bits']

 HELP_MESSAGE = """
 to search:
@ -120,8 +30,3 @@ to download:

 have fun :3
 """.strip()
-
-FFMPEG_BINARY: Path = PATHS_SECTION.FFMPEG_BINARY.object_from_value
-
-HASNT_YET_STARTED: bool = MISC_SECTION.HASNT_YET_STARTED.object_from_value
-SLEEP_AFTER_YOUTUBE_403: float = CONNECTION_SECTION.SLEEP_AFTER_YOUTUBE_403.object_from_value
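Under the new scheme, code that previously imported module-level constants from `shared` would presumably read from the settings dicts directly. A hedged sketch of how the Tor proxy setup removed above could be reproduced at a call site (the keys come from `main_config.py`; the surrounding variable names are assumptions):

```
# Hypothetical replacement for the removed proxy globals.
proxies = {}
if main_settings["tor"]:
    port = main_settings["tor_port"]
    proxies = {
        "http": f"socks5h://127.0.0.1:{port}",
        "https": f"socks5h://127.0.0.1:{port}",
    }
```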
@ -68,3 +68,10 @@ def clean_song_title(raw_song_title: str, artist_name: str) -> str:
         raw_song_title = raw_song_title[1:].strip()

     return raw_song_title.strip()
+
+
+def comment(uncommented_string: str) -> str:
+    _fragments = uncommented_string.split("\n")
+    _fragments = ["# " + frag for frag in _fragments]
+    return "\n".join(_fragments)
@ -1,13 +1,16 @@
 from dataclasses import dataclass, field
 from typing import List, Tuple

-from ...utils.shared import SHOW_DOWNLOAD_ERRORS_THRESHOLD, DOWNLOAD_LOGGER as LOGGER
+from ...utils.config import main_settings, logging_settings
 from ...objects import Target

 UNIT_PREFIXES: List[str] = ["", "k", "m", "g", "t"]
 UNIT_DIVISOR = 1024

+LOGGER = logging_settings["download_logger"]
+

 @dataclass
 class DownloadResult:
     total: int = 0
@ -44,7 +47,7 @@ class DownloadResult:
         if self.is_fatal_error:
             return True

-        return self.failure_percentage > SHOW_DOWNLOAD_ERRORS_THRESHOLD
+        return self.failure_percentage > main_settings["show_download_errors_threshold"]

     def _size_val_unit_pref_ind(self, val: float, ind: int) -> Tuple[float, int]:
         if val < UNIT_DIVISOR:
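The `_size_val_unit_pref_ind` helper is cut off above; judging by `UNIT_PREFIXES` and `UNIT_DIVISOR`, it walks a byte count up the unit-prefix ladder. The following is an illustrative sketch of that idea, not the exact method body:

```
# Illustrative only; mirrors the UNIT_PREFIXES/UNIT_DIVISOR idea shown above.
def human_readable_size(size: float) -> str:
    index = 0
    while size >= UNIT_DIVISOR and index < len(UNIT_PREFIXES) - 1:
        size /= UNIT_DIVISOR
        index += 1
    return f"{size:.2f} {UNIT_PREFIXES[index]}B"

human_readable_size(3_500_000)  # -> "3.34 mB" (the project spells the prefixes lowercase)
```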
6
src/settings.py
Normal file
@ -0,0 +1,6 @@
from pathlib import Path
import tomllib


# tomllib.load() expects a binary file object, so the file has to be opened in "rb" mode.
data = tomllib.load(Path("/home/lars/music-kraken.conf").open("rb"))
print(data)