Compare commits: 1.15.1...ae921c3626 (37 commits)
| SHA1 |
|---|
| ae921c3626 |
| f52b5e6325 |
| 25eceb727b |
| e77afa584b |
| e2b7c5475f |
| 7bc73de128 |
| 3d94e6e2dd |
| a015b8918e |
| 90a352153d |
| d7e9f45082 |
| e11cfbda67 |
| 60e84a0638 |
| 14f1fe4b16 |
| c0ef24a28e |
| cacff47643 |
| 0179246ec0 |
| 3d432cd0d7 |
| 0080a48e70 |
| ea5adfbe8a |
| fa723d7747 |
| 312e57d82e |
| a998e52cd9 |
| b4c73d56a7 |
| 1735ff4e1d |
| be09562632 |
| 29770825a4 |
| 81708ba100 |
| 301ff82bcf |
| 06ffae06a6 |
| 919a99885c |
| e20b14a9df |
| b933c6ac14 |
| 3c5bbc19af |
| 06acf22abb |
| 1e62d371cd |
| 24a90f1cdf |
| d9c711a2f8 |
.vscode/settings.json (vendored, 4 changes)
						| @@ -20,12 +20,16 @@ | |||||||
|         "Bandcamp", |         "Bandcamp", | ||||||
|         "dotenv", |         "dotenv", | ||||||
|         "encyclopaedia", |         "encyclopaedia", | ||||||
|  |         "ENDC", | ||||||
|         "levenshtein", |         "levenshtein", | ||||||
|         "metallum", |         "metallum", | ||||||
|         "musify", |         "musify", | ||||||
|         "OKBLUE", |         "OKBLUE", | ||||||
|  |         "pathvalidate", | ||||||
|         "Referer", |         "Referer", | ||||||
|  |         "sponsorblock", | ||||||
|         "tracksort", |         "tracksort", | ||||||
|  |         "translit", | ||||||
|         "unmap", |         "unmap", | ||||||
|         "youtube" |         "youtube" | ||||||
|     ] |     ] | ||||||
| @@ -1,66 +0,0 @@ | |||||||
| DROP TABLE IF EXISTS artist; |  | ||||||
| CREATE TABLE artist ( |  | ||||||
|     id TEXT PRIMARY KEY NOT NULL, |  | ||||||
|     name TEXT |  | ||||||
| ); |  | ||||||
|  |  | ||||||
| DROP TABLE IF EXISTS artist_release_group; |  | ||||||
| CREATE TABLE artist_release_group ( |  | ||||||
|     artist_id TEXT NOT NULL, |  | ||||||
|     release_group_id TEXT NOT NULL |  | ||||||
| ); |  | ||||||
|  |  | ||||||
| DROP TABLE IF EXISTS artist_track; |  | ||||||
| CREATE TABLE artist_track ( |  | ||||||
|     artist_id TEXT NOT NULL, |  | ||||||
|     track_id TEXT NOT NULL |  | ||||||
| ); |  | ||||||
|  |  | ||||||
| DROP TABLE IF EXISTS release_group; |  | ||||||
| CREATE TABLE release_group ( |  | ||||||
|     id TEXT PRIMARY KEY NOT NULL, |  | ||||||
|     albumartist TEXT, |  | ||||||
|     albumsort INT, |  | ||||||
|     musicbrainz_albumtype TEXT, |  | ||||||
|     compilation TEXT, |  | ||||||
|     album_artist_id TEXT |  | ||||||
| ); |  | ||||||
|  |  | ||||||
| DROP TABLE IF EXISTS release_; |  | ||||||
| CREATE TABLE release_ ( |  | ||||||
|     id TEXT PRIMARY KEY NOT NULL, |  | ||||||
|     release_group_id TEXT NOT NULL, |  | ||||||
|     title TEXT,  |  | ||||||
|     copyright TEXT, |  | ||||||
|     album_status TEXT, |  | ||||||
|     language TEXT, |  | ||||||
|     year TEXT, |  | ||||||
|     date TEXT, |  | ||||||
|     country TEXT, |  | ||||||
|     barcode TEXT |  | ||||||
| ); |  | ||||||
|  |  | ||||||
| DROP TABLE IF EXISTS track; |  | ||||||
| CREATE TABLE track ( |  | ||||||
|     id TEXT PRIMARY KEY NOT NULL, |  | ||||||
|     downloaded BOOLEAN NOT NULL DEFAULT 0, |  | ||||||
|     release_id TEXT NOT NULL, |  | ||||||
|     track TEXT, |  | ||||||
|     length INT, |  | ||||||
|     tracknumber TEXT, |  | ||||||
|     isrc TEXT, |  | ||||||
|     genre TEXT, |  | ||||||
|     lyrics TEXT, |  | ||||||
|     path TEXT, |  | ||||||
|     file TEXT, |  | ||||||
|     url TEXT, |  | ||||||
|     src TEXT |  | ||||||
| ); |  | ||||||
|  |  | ||||||
| DROP TABLE IF EXISTS source; |  | ||||||
| CREATE TABLE source ( |  | ||||||
|     track_id TEXT NOT NULL, |  | ||||||
|     src TEXT NOT NULL, |  | ||||||
|     url TEXT NOT NULL, |  | ||||||
|     valid BOOLEAN NOT NULL DEFAULT 1 |  | ||||||
| ); |  | ||||||
| @@ -1,53 +1,15 @@ | |||||||
| <?xml version="1.0" encoding="UTF-8" standalone="no"?> | <?xml version="1.0" encoding="UTF-8" standalone="no"?> | ||||||
| <svg | <svg version="1.0" width="1024" height="1024" viewBox="0 0 1024.000000 1024.000000" | ||||||
|    version="1.0" |    preserveAspectRatio="xMidYMid meet" id="svg168" sodipodi:docname="02.svg" | ||||||
|    width="1024.000000pt" |    inkscape:version="1.2.2 (b0a8486541, 2022-12-01)" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | ||||||
|    height="1024.000000pt" |    xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns="http://www.w3.org/2000/svg" | ||||||
|    viewBox="0 0 1024.000000 1024.000000" |  | ||||||
|    preserveAspectRatio="xMidYMid meet" |  | ||||||
|    id="svg168" |  | ||||||
|    sodipodi:docname="02.svg" |  | ||||||
|    inkscape:version="1.2.2 (b0a8486541, 2022-12-01)" |  | ||||||
|    xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" |  | ||||||
|    xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" |  | ||||||
|    xmlns="http://www.w3.org/2000/svg" |  | ||||||
|    xmlns:svg="http://www.w3.org/2000/svg"> |    xmlns:svg="http://www.w3.org/2000/svg"> | ||||||
|   <defs |    <defs id="defs172" /> | ||||||
|      id="defs172" /> |  | ||||||
|   <sodipodi:namedview |    <rect x="0" y="0" width="100%" height="100%" rx="10%" fill="#f0f0f0" id="background" /> | ||||||
|      id="namedview170" |  | ||||||
|      pagecolor="#ffffff" |    <g transform="translate(0.000000,1024.000000) scale(0.100000,-0.100000)" fill="#000000" id="wireframe"> | ||||||
|      bordercolor="#000000" |  | ||||||
|      borderopacity="0.25" |  | ||||||
|      inkscape:showpageshadow="2" |  | ||||||
|      inkscape:pageopacity="0.0" |  | ||||||
|      inkscape:pagecheckerboard="0" |  | ||||||
|      inkscape:deskcolor="#d1d1d1" |  | ||||||
|      inkscape:document-units="pt" |  | ||||||
|      showgrid="false" |  | ||||||
|      inkscape:zoom="0.69140625" |  | ||||||
|      inkscape:cx="437.51412" |  | ||||||
|      inkscape:cy="984.22599" |  | ||||||
|      inkscape:window-width="1866" |  | ||||||
|      inkscape:window-height="1012" |  | ||||||
|      inkscape:window-x="0" |  | ||||||
|      inkscape:window-y="0" |  | ||||||
|      inkscape:window-maximized="1" |  | ||||||
|      inkscape:current-layer="g166" /> |  | ||||||
|   <g |  | ||||||
|      transform="translate(0.000000,1024.000000) scale(0.100000,-0.100000)" |  | ||||||
|      fill="#000000" |  | ||||||
|      stroke="none" |  | ||||||
|      id="g166"> |  | ||||||
|     <rect |  | ||||||
|        x="10" |  | ||||||
|        y="10" |  | ||||||
|        width="10239.509" |  | ||||||
|        height="10229.297" |  | ||||||
|        rx="1503.97427" |  | ||||||
|        fill="#f0f0f0" |  | ||||||
|        id="rect148" |  | ||||||
|        style="stroke-width:10.1935" /> |  | ||||||
|       <path |       <path | ||||||
|          d="M4784 8535 c-695 -66 -1296 -270 -1819 -616 -369 -245 -627 -477 -843 -763 -304 -402 -461 -948 -479 -1666 -9 -352 13 -581 82 -850 40 -156 61 -215 117 -323 55 -105 114 -169 194 -208 61 -30 69 -32 148 -27 179 12 320 123 356 281 8 38 6 64 -15 154 -14 59 -32 140 -41 178 -8 39 -21 95 -29 125 -41 165 -50 270 -50 565 0 261 3 309 28 480 30 214 28 242 -24 293 -41 40 -146 68 -312 84 -70 6 -127 15 -127 20 0 15 102 293 139 378 79 183 209 386 348 546 129 147 379 360 588 501 124 83 234 147 242 139 3 -3 -21 -36 -54 -73 -178 -203 -321 -426 -411 -643 -110 -265 -152 -484 -153 -804 -1 -338 43 -569 166 -877 56 -138 108 -235 192 -357 83 -119 95 -148 137 -323 54 -224 163 -505 223 -574 50 -57 102 -69 147 -34 46 36 34 86 -63 252 -65 113 -88 182 -107 332 -17 133 -20 142 -164 445 -148 313 -197 440 -250 650 -42 169 -60 311 -60 480 0 575 268 1118 733 1488 260 206 635 354 1060 418 142 21 566 26 722 9 323 -36 644 -133 905 -273 180 -96 322 -205 481 -368 464 -478 615 -1159 402 -1809 -22 -66 -78 -191 -142 -315 -275 -536 -251 -481 -271 -620 -10 -69 -28 -177 -40 -240 -27 -146 -37 -342 -20 -394 15 -47 51 -64 87 -41 73 49 164 319 184 549 17 208 39 271 158 461 197 313 285 530 342 845 31 167 34 543 6 685 -82 408 -210 682 -470 1005 -47 58 -83 107 -81 109 1 2 21 -7 43 -20 22 -13 77 -46 123 -73 324 -190 683 -538 883 -856 91 -145 268 -561 247 -582 -4 -3 -60 -16 -125 -27 -175 -31 -300 -80 -364 -141 -29 -26 -29 -54 -2 -190 64 -330 65 -751 3 -1081 -8 -46 -32 -145 -51 -219 -42 -157 -47 -246 -19 -329 20 -58 68 -118 120 -151 106 -65 273 -77 372 -27 140 71 251 273 328 592 55 229 76 429 76 725 0 991 -288 1664 -949 2213 -577 481 -1339 795 -2151 887 -154 18 -537 21 -696 5z" |          d="M4784 8535 c-695 -66 -1296 -270 -1819 -616 -369 -245 -627 -477 -843 -763 -304 -402 -461 -948 -479 -1666 -9 -352 13 -581 82 -850 40 -156 61 -215 117 -323 55 -105 114 -169 194 -208 61 -30 69 -32 148 -27 179 12 320 123 356 281 8 38 6 64 -15 154 -14 59 -32 140 -41 178 -8 39 -21 95 -29 125 -41 165 -50 270 -50 565 0 261 3 309 28 480 30 214 28 242 -24 293 -41 40 -146 68 -312 84 -70 6 -127 15 -127 20 0 15 102 293 139 378 79 183 209 386 348 546 129 147 379 360 588 501 124 83 234 147 242 139 3 -3 -21 -36 -54 -73 -178 -203 -321 -426 -411 -643 -110 -265 -152 -484 -153 -804 -1 -338 43 -569 166 -877 56 -138 108 -235 192 -357 83 -119 95 -148 137 -323 54 -224 163 -505 223 -574 50 -57 102 -69 147 -34 46 36 34 86 -63 252 -65 113 -88 182 -107 332 -17 133 -20 142 -164 445 -148 313 -197 440 -250 650 -42 169 -60 311 -60 480 0 575 268 1118 733 1488 260 206 635 354 1060 418 142 21 566 26 722 9 323 -36 644 -133 905 -273 180 -96 322 -205 481 -368 464 -478 615 -1159 402 -1809 -22 -66 -78 -191 -142 -315 -275 -536 -251 -481 -271 -620 -10 -69 -28 -177 -40 -240 -27 -146 -37 -342 -20 -394 15 -47 51 -64 87 -41 73 49 164 319 184 549 17 208 39 271 158 461 197 313 285 530 342 845 31 167 34 543 6 685 -82 408 -210 682 -470 1005 -47 58 -83 107 -81 109 1 2 21 -7 43 -20 22 -13 77 -46 123 -73 324 -190 683 -538 883 -856 91 -145 268 -561 247 -582 -4 -3 -60 -16 -125 -27 -175 -31 -300 -80 -364 -141 -29 -26 -29 -54 -2 -190 64 -330 65 -751 3 -1081 -8 -46 -32 -145 -51 -219 -42 -157 -47 -246 -19 -329 20 -58 68 -118 120 -151 106 -65 273 -77 372 -27 140 71 251 273 328 592 55 229 76 429 76 725 0 991 -288 1664 -949 2213 -577 481 -1339 795 -2151 887 -154 18 -537 21 -696 5z" | ||||||
|          id="path150" /> |          id="path150" /> | ||||||
Image changed: 5.8 KiB before, 5.1 KiB after.
Image removed: 302 KiB.
Image removed: 30 KiB.
Image removed: 292 KiB.
| @@ -1,76 +0,0 @@ | |||||||
| <?xml version="1.0" standalone="no"?> |  | ||||||
| <!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN" |  | ||||||
|  "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd"> |  | ||||||
| <svg version="1.0" xmlns="http://www.w3.org/2000/svg" |  | ||||||
|  width="1024.000000pt" height="1024.000000pt" viewBox="0 0 1024.000000 1024.000000" |  | ||||||
|  preserveAspectRatio="xMidYMid meet"> |  | ||||||
|  |  | ||||||
| <g transform="translate(0.000000,1024.000000) scale(0.100000,-0.100000)" |  | ||||||
| fill="#000000" stroke="none"> |  | ||||||
| <path d="M4965 7890 c-800 -37 -1523 -349 -2220 -960 -398 -349 -585 -575 |  | ||||||
| -740 -895 -186 -381 -255 -705 -255 -1187 l0 -170 -31 7 c-16 4 -57 4 -90 0 |  | ||||||
| l-59 -7 0 -46 c0 -26 7 -85 16 -132 32 -182 33 -172 -25 -256 -84 -120 -144 |  | ||||||
| -270 -172 -427 -19 -116 -7 -352 25 -477 126 -486 561 -875 1080 -965 192 -33 |  | ||||||
| 458 -14 628 44 270 93 516 298 632 529 75 150 106 265 106 404 0 221 -64 380 |  | ||||||
| -230 566 -256 287 -315 365 -382 509 -71 151 -81 208 -82 458 -1 207 1 226 26 |  | ||||||
| 322 27 103 90 244 147 327 56 80 154 168 237 212 93 49 184 72 354 87 l125 12 |  | ||||||
| 3 40 c5 73 -8 80 -150 78 -189 -2 -303 -33 -465 -124 -224 -126 -412 -428 |  | ||||||
| -445 -713 -11 -97 -4 -324 13 -441 29 -193 116 -405 226 -552 22 -28 99 -118 |  | ||||||
| 173 -200 210 -233 261 -314 281 -445 24 -155 -6 -312 -94 -483 -54 -106 -197 |  | ||||||
| -252 -312 -319 -177 -103 -317 -145 -515 -153 -202 -8 -362 24 -547 112 -140 |  | ||||||
| 66 -228 128 -339 239 -187 187 -278 396 -291 661 -11 242 60 492 169 595 32 |  | ||||||
| 30 101 64 176 86 42 12 62 23 61 34 0 8 -15 53 -33 100 -28 72 -37 85 -56 85 |  | ||||||
| -24 0 -33 -11 -50 -57 -20 -57 -68 -20 -94 71 -9 31 -16 72 -16 89 0 32 0 32 |  | ||||||
| 50 32 l50 0 0 40 c0 32 5 43 29 61 l29 21 6 147 c11 255 40 516 73 645 84 337 |  | ||||||
| 221 619 423 869 58 72 181 197 233 237 39 31 36 11 -24 -121 -106 -239 -174 |  | ||||||
| -489 -198 -729 -6 -58 -14 -124 -17 -147 l-5 -43 83 0 83 0 2 28 c6 87 48 350 |  | ||||||
| 69 434 102 408 382 841 719 1111 444 356 1004 562 1620 597 534 30 1027 -72 |  | ||||||
| 1505 -310 599 -298 964 -752 1090 -1355 25 -121 41 -502 22 -514 -7 -4 30 -5 |  | ||||||
| 81 -3 50 2 94 6 97 9 3 3 5 102 4 221 0 153 -6 251 -18 331 -17 110 -74 358 |  | ||||||
| -95 414 -6 15 -7 27 -2 27 16 0 150 -148 223 -246 171 -231 272 -434 347 -701 |  | ||||||
| 66 -234 95 -428 76 -502 -7 -31 -8 -64 -2 -101 4 -30 11 -131 15 -225 l7 -170 |  | ||||||
| 70 -1 c135 -3 125 1 125 -41 0 -46 -25 -142 -40 -157 -6 -6 -53 -11 -113 -12 |  | ||||||
| l-102 -1 -7 -49 c-4 -27 -9 -63 -13 -80 -5 -30 -5 -30 71 -54 89 -28 144 -72 |  | ||||||
| 205 -162 98 -143 139 -280 139 -459 0 -169 -28 -286 -105 -439 -162 -321 -435 |  | ||||||
| -531 -787 -606 -126 -27 -362 -24 -493 5 -220 50 -383 138 -531 285 -108 109 |  | ||||||
| -156 188 -188 312 -21 80 -20 225 0 317 32 139 97 245 300 488 186 221 240 |  | ||||||
| 310 307 502 l32 92 0 290 0 290 -33 95 c-18 52 -61 154 -97 225 -103 207 -209 |  | ||||||
| 316 -400 410 -144 72 -238 90 -460 88 -194 -1 -215 -7 -215 -62 0 -28 5 -43 |  | ||||||
| 18 -49 9 -5 82 -12 162 -16 235 -10 349 -41 485 -131 143 -95 243 -219 308 |  | ||||||
| -383 57 -141 70 -223 70 -412 -1 -406 -86 -608 -406 -972 -117 -133 -170 -220 |  | ||||||
| -215 -350 -116 -340 24 -729 352 -976 76 -58 249 -149 342 -180 209 -71 469 |  | ||||||
| -85 697 -38 279 57 494 174 699 377 257 256 378 540 378 889 0 103 -5 146 -25 |  | ||||||
| 225 -39 157 -123 300 -221 380 l-45 37 21 36 c33 56 78 302 61 331 -5 7 -41 |  | ||||||
| 17 -81 22 -40 5 -75 11 -78 14 -2 2 -6 35 -8 72 -22 403 -38 538 -89 728 -152 |  | ||||||
| 580 -405 994 -886 1447 -188 177 -268 241 -496 398 -389 269 -901 464 -1397 |  | ||||||
| 535 -99 14 -425 36 -486 33 -14 -1 -97 -4 -185 -8z"/> |  | ||||||
| <path d="M2446 5430 c-70 -11 -124 -41 -200 -111 -74 -68 -120 -161 -142 -289 |  | ||||||
| -52 -305 58 -798 216 -960 71 -73 124 -95 230 -95 107 0 172 27 231 94 114 |  | ||||||
| 129 143 301 137 811 l-3 305 -28 57 c-35 72 -110 140 -183 168 -66 24 -175 33 |  | ||||||
| -258 20z m186 -340 c34 -42 39 -83 41 -385 1 -211 -1 -244 -16 -272 -35 -66 |  | ||||||
| -122 -92 -175 -54 -58 41 -67 88 -66 346 1 248 8 353 28 380 32 44 146 35 188 |  | ||||||
| -15z"/> |  | ||||||
| <path d="M7603 5430 c-118 -24 -229 -113 -266 -216 -37 -99 -47 -568 -17 -779 |  | ||||||
| 45 -314 174 -465 398 -465 96 0 158 27 225 99 76 82 111 169 148 377 30 167 |  | ||||||
| 34 565 6 664 -68 240 -261 366 -494 320z m153 -331 c31 -25 64 -116 75 -204 |  | ||||||
| 13 -106 5 -336 -15 -410 -40 -153 -118 -198 -196 -116 -56 59 -72 114 -78 271 |  | ||||||
| -2 75 0 171 7 215 6 44 13 106 17 137 7 75 38 121 84 128 38 6 84 -3 106 -21z"/> |  | ||||||
| <path d="M4219 5372 c-199 -52 -323 -212 -364 -468 -6 -38 -4 -42 23 -53 65 |  | ||||||
| -27 107 7 172 136 61 123 123 183 212 208 104 28 199 18 293 -30 41 -21 96 |  | ||||||
| -60 122 -87 54 -54 76 -60 96 -22 29 56 -11 153 -93 228 -104 94 -296 131 |  | ||||||
| -461 88z"/> |  | ||||||
| <path d="M5739 5278 c-135 -48 -216 -116 -250 -211 -13 -37 -13 -40 10 -58 37 |  | ||||||
| -30 74 -25 105 16 15 19 51 47 79 62 45 23 65 27 142 27 76 0 97 -3 135 -23 |  | ||||||
| 113 -60 165 -112 263 -266 42 -65 73 -81 122 -61 58 24 50 47 -96 286 -112 |  | ||||||
| 185 -190 238 -359 247 -70 3 -96 0 -151 -19z"/> |  | ||||||
| <path d="M7247 5166 c-5 -15 -6 -31 -3 -34 8 -8 18 25 14 45 -3 13 -6 10 -11 |  | ||||||
| -11z"/> |  | ||||||
| <path d="M4626 4169 c-50 -8 -86 -37 -114 -90 -20 -37 -23 -54 -18 -104 21 |  | ||||||
| -232 161 -450 350 -544 175 -88 392 -92 571 -11 185 84 323 271 355 482 19 |  | ||||||
| 127 -9 233 -67 256 -30 13 -1008 22 -1077 11z m928 -169 c35 -13 40 -45 21 |  | ||||||
| -121 -35 -134 -108 -240 -196 -284 -65 -33 -172 -48 -309 -43 -100 4 -122 8 |  | ||||||
| -166 31 -89 45 -160 146 -189 269 -22 94 -20 137 8 148 30 12 799 13 831 0z"/> |  | ||||||
| <path d="M9980 552 c0 -4 21 -28 46 -52 55 -53 55 -40 2 19 -38 41 -48 48 -48 |  | ||||||
| 33z"/> |  | ||||||
| </g> |  | ||||||
| </svg> |  | ||||||
Image removed: 4.9 KiB.
Image removed: 168 KiB.
| @@ -1,73 +0,0 @@ | |||||||
| <?xml version="1.0" encoding="UTF-8" standalone="no"?> |  | ||||||
| <svg |  | ||||||
|    version="1.0" |  | ||||||
|    width="1024.000000pt" |  | ||||||
|    height="1024.000000pt" |  | ||||||
|    viewBox="0 0 1024.000000 1024.000000" |  | ||||||
|    preserveAspectRatio="xMidYMid meet" |  | ||||||
|    id="svg168" |  | ||||||
|    sodipodi:docname="02.svg" |  | ||||||
|    inkscape:version="1.2.2 (b0a8486541, 2022-12-01)" |  | ||||||
|    xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" |  | ||||||
|    xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" |  | ||||||
|    xmlns="http://www.w3.org/2000/svg" |  | ||||||
|    xmlns:svg="http://www.w3.org/2000/svg"> |  | ||||||
|   <defs |  | ||||||
|      id="defs172" /> |  | ||||||
|   <sodipodi:namedview |  | ||||||
|      id="namedview170" |  | ||||||
|      pagecolor="#ffffff" |  | ||||||
|      bordercolor="#000000" |  | ||||||
|      borderopacity="0.25" |  | ||||||
|      inkscape:showpageshadow="2" |  | ||||||
|      inkscape:pageopacity="0.0" |  | ||||||
|      inkscape:pagecheckerboard="0" |  | ||||||
|      inkscape:deskcolor="#d1d1d1" |  | ||||||
|      inkscape:document-units="pt" |  | ||||||
|      showgrid="false" |  | ||||||
|      inkscape:zoom="0.69140625" |  | ||||||
|      inkscape:cx="437.51412" |  | ||||||
|      inkscape:cy="984.22599" |  | ||||||
|      inkscape:window-width="1866" |  | ||||||
|      inkscape:window-height="1012" |  | ||||||
|      inkscape:window-x="0" |  | ||||||
|      inkscape:window-y="0" |  | ||||||
|      inkscape:window-maximized="1" |  | ||||||
|      inkscape:current-layer="g166" /> |  | ||||||
|   <g |  | ||||||
|      transform="translate(0.000000,1024.000000) scale(0.100000,-0.100000)" |  | ||||||
|      fill="#000000" |  | ||||||
|      stroke="none" |  | ||||||
|      id="g166"> |  | ||||||
|     <rect |  | ||||||
|        x="10" |  | ||||||
|        y="10" |  | ||||||
|        width="10239.509" |  | ||||||
|        height="10229.297" |  | ||||||
|        rx="1503.97427" |  | ||||||
|        fill="#f0f0f0" |  | ||||||
|        id="rect148" |  | ||||||
|        style="stroke-width:10.1935" /> |  | ||||||
|     <path |  | ||||||
|        d="M4784 8535 c-695 -66 -1296 -270 -1819 -616 -369 -245 -627 -477 -843 -763 -304 -402 -461 -948 -479 -1666 -9 -352 13 -581 82 -850 40 -156 61 -215 117 -323 55 -105 114 -169 194 -208 61 -30 69 -32 148 -27 179 12 320 123 356 281 8 38 6 64 -15 154 -14 59 -32 140 -41 178 -8 39 -21 95 -29 125 -41 165 -50 270 -50 565 0 261 3 309 28 480 30 214 28 242 -24 293 -41 40 -146 68 -312 84 -70 6 -127 15 -127 20 0 15 102 293 139 378 79 183 209 386 348 546 129 147 379 360 588 501 124 83 234 147 242 139 3 -3 -21 -36 -54 -73 -178 -203 -321 -426 -411 -643 -110 -265 -152 -484 -153 -804 -1 -338 43 -569 166 -877 56 -138 108 -235 192 -357 83 -119 95 -148 137 -323 54 -224 163 -505 223 -574 50 -57 102 -69 147 -34 46 36 34 86 -63 252 -65 113 -88 182 -107 332 -17 133 -20 142 -164 445 -148 313 -197 440 -250 650 -42 169 -60 311 -60 480 0 575 268 1118 733 1488 260 206 635 354 1060 418 142 21 566 26 722 9 323 -36 644 -133 905 -273 180 -96 322 -205 481 -368 464 -478 615 -1159 402 -1809 -22 -66 -78 -191 -142 -315 -275 -536 -251 -481 -271 -620 -10 -69 -28 -177 -40 -240 -27 -146 -37 -342 -20 -394 15 -47 51 -64 87 -41 73 49 164 319 184 549 17 208 39 271 158 461 197 313 285 530 342 845 31 167 34 543 6 685 -82 408 -210 682 -470 1005 -47 58 -83 107 -81 109 1 2 21 -7 43 -20 22 -13 77 -46 123 -73 324 -190 683 -538 883 -856 91 -145 268 -561 247 -582 -4 -3 -60 -16 -125 -27 -175 -31 -300 -80 -364 -141 -29 -26 -29 -54 -2 -190 64 -330 65 -751 3 -1081 -8 -46 -32 -145 -51 -219 -42 -157 -47 -246 -19 -329 20 -58 68 -118 120 -151 106 -65 273 -77 372 -27 140 71 251 273 328 592 55 229 76 429 76 725 0 991 -288 1664 -949 2213 -577 481 -1339 795 -2151 887 -154 18 -537 21 -696 5z" |  | ||||||
|        id="path150" /> |  | ||||||
|     <path |  | ||||||
|        d="M5963 4946 c-158 -51 -243 -191 -243 -398 0 -160 41 -281 122 -359 55 -53 99 -71 178 -72 55 -2 76 3 132 31 119 58 236 210 254 329 14 95 -50 278 -130 370 -72 82 -220 129 -313 99z m376 -302 c58 -49 66 -147 14 -198 -34 -34 -74 -34 -113 2 -57 50 -60 140 -8 193 36 36 67 37 107 3z" |  | ||||||
|        id="path152" /> |  | ||||||
|     <path |  | ||||||
|        d="M4089 4943 c-49 -8 -133 -66 -166 -116 -43 -64 -53 -102 -60 -224 -5 -91 -3 -110 21 -186 32 -103 76 -171 140 -214 126 -86 260 -73 354 33 73 82 97 158 97 310 0 121 0 121 -39 198 -51 101 -114 158 -203 186 -63 19 -88 22 -144 13z m-91 -294 c84 -29 79 -157 -8 -219 -65 -46 -110 -3 -113 107 -2 74 8 97 48 113 28 12 37 12 73 -1z" |  | ||||||
|        id="path154" /> |  | ||||||
|     <path |  | ||||||
|        d="M2585 3875 c-183 -29 -311 -98 -360 -194 -44 -88 -42 -163 6 -190 35 -20 65 -10 156 53 107 73 131 84 220 103 158 32 281 14 698 -102 301 -84 366 -93 423 -60 65 39 64 70 -5 146 -45 49 -65 58 -272 116 -516 143 -650 163 -866 128z" |  | ||||||
|        id="path156" /> |  | ||||||
|     <path |  | ||||||
|        d="M7350 3874 c-174 -23 -417 -78 -635 -145 -71 -22 -172 -49 -223 -59 -52 -10 -96 -21 -99 -24 -3 -3 -9 -24 -13 -48 -11 -57 7 -91 60 -112 74 -29 165 -17 465 63 295 79 375 94 505 94 82 1 131 -4 171 -16 58 -18 151 -69 188 -104 12 -11 38 -29 57 -39 31 -17 37 -17 62 -4 36 21 72 76 72 112 0 70 -68 167 -148 211 -77 42 -192 68 -317 72 -60 2 -126 1 -145 -1z" |  | ||||||
|        id="path158" /> |  | ||||||
|     <path |  | ||||||
|        d="M5404 3765 c-207 -147 -263 -172 -364 -162 -77 8 -129 33 -235 111 -86 63 -129 85 -142 73 -13 -13 50 -118 103 -170 82 -83 160 -119 277 -125 150 -8 252 32 350 137 70 75 111 171 73 171 -8 0 -36 -16 -62 -35z" |  | ||||||
|        id="path160" /> |  | ||||||
|     <path |  | ||||||
|        d="M3981 3144 c-266 -178 -442 -186 -926 -41 -302 91 -455 97 -612 23 -157 -75 -251 -214 -222 -330 21 -83 76 -225 110 -284 100 -170 378 -370 654 -471 376 -137 757 -167 1102 -86 278 66 504 187 689 370 108 107 176 197 239 316 25 49 51 91 56 95 16 9 31 -9 94 -111 196 -316 448 -516 810 -641 336 -117 740 -122 1125 -14 297 84 533 213 711 389 164 163 221 269 222 416 1 66 -4 90 -25 135 -78 168 -302 263 -558 237 -41 -4 -156 -30 -255 -57 -223 -62 -311 -79 -446 -87 -183 -10 -352 31 -554 135 l-98 50 -22 -24 c-40 -44 -49 -77 -30 -117 29 -63 136 -154 230 -198 114 -54 192 -70 367 -76 177 -7 282 9 503 72 280 81 392 93 508 54 106 -35 157 -84 157 -151 0 -51 -59 -145 -134 -215 -226 -211 -559 -347 -961 -393 -216 -24 -499 5 -699 72 -314 105 -535 288 -671 556 -42 84 -31 81 -206 56 -100 -14 -118 -14 -186 0 -41 9 -79 16 -84 16 -5 0 -22 -30 -39 -66 -112 -249 -373 -466 -681 -568 -355 -118 -819 -76 -1207 109 -284 136 -425 272 -474 458 -11 41 -10 52 3 75 33 60 129 94 259 95 83 0 151 -15 325 -68 353 -109 499 -125 706 -75 157 38 305 134 365 236 23 39 24 48 14 78 -13 41 -47 86 -63 86 -7 0 -50 -25 -96 -56z" |  | ||||||
|        id="path162" /> |  | ||||||
|   </g> |  | ||||||
| </svg> |  | ||||||
Image removed: 5.8 KiB.
Image removed: 590 KiB.
| @@ -1,137 +0,0 @@ | |||||||
| <?xml version="1.0" standalone="no"?> |  | ||||||
| <!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN" |  | ||||||
|  "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd"> |  | ||||||
| <svg version="1.0" xmlns="http://www.w3.org/2000/svg" |  | ||||||
|  width="1024.000000pt" height="1024.000000pt" viewBox="0 0 1024.000000 1024.000000" |  | ||||||
|  preserveAspectRatio="xMidYMid meet"> |  | ||||||
|  |  | ||||||
| <g transform="translate(0.000000,1024.000000) scale(0.100000,-0.100000)" |  | ||||||
| fill="#000000" stroke="none"> |  | ||||||
| <path d="M1884 10147 c-438 -456 -723 -1077 -825 -1797 -30 -207 -33 -589 -6 |  | ||||||
| -688 26 -97 31 -92 -81 -91 -349 0 -651 -131 -891 -388 l-81 -86 0 -313 c0 |  | ||||||
| -173 2 -314 4 -314 2 0 15 17 27 38 105 172 237 352 259 352 4 0 -3 -39 -16 |  | ||||||
| -87 -179 -642 -244 -1229 -215 -1938 11 -258 41 -647 62 -785 5 -36 14 -99 19 |  | ||||||
| -140 16 -113 67 -403 106 -600 8 -41 26 -119 40 -172 14 -53 24 -105 22 -115 |  | ||||||
| -3 -19 -114 198 -212 417 -31 69 -66 139 -76 155 l-20 30 0 -296 0 -295 68 |  | ||||||
| -105 c113 -172 229 -298 351 -380 64 -44 227 -124 300 -149 36 -12 114 -33 |  | ||||||
| 175 -47 130 -29 314 -35 462 -14 50 8 97 12 104 9 19 -6 -282 -123 -407 -158 |  | ||||||
| -163 -46 -305 -64 -458 -57 -140 7 -176 14 -359 72 -92 29 -127 36 -158 31 |  | ||||||
| -76 -12 -78 -15 -78 -124 l0 -97 43 -26 c76 -48 103 -58 218 -83 203 -44 260 |  | ||||||
| -51 424 -50 322 2 609 85 1027 296 148 74 162 79 250 89 98 10 132 24 358 144 |  | ||||||
| 81 43 150 59 150 35 0 -20 -48 -105 -96 -170 -132 -181 -370 -374 -601 -489 |  | ||||||
| -241 -120 -476 -181 -804 -210 -128 -11 -181 -34 -255 -113 -74 -78 -97 -144 |  | ||||||
| -95 -278 0 -60 7 -130 16 -163 55 -211 304 -437 615 -560 127 -50 353 -97 470 |  | ||||||
| -97 57 0 222 23 252 35 13 5 23 -7 47 -57 67 -141 187 -236 371 -292 l85 -27 |  | ||||||
| 450 3 450 3 118 38 c222 71 402 159 557 273 104 76 282 259 339 349 28 44 55 |  | ||||||
| 85 60 91 11 14 395 24 886 24 l361 0 60 -88 c225 -333 663 -595 1091 -652 116 |  | ||||||
| -16 379 -7 482 15 184 41 364 115 512 211 94 61 233 199 289 286 l43 67 56 |  | ||||||
| -28 c299 -150 843 -78 1089 144 63 57 112 139 112 184 -1 35 -32 101 -61 128 |  | ||||||
| -13 12 -105 64 -205 116 -206 107 -221 117 -286 170 -72 58 -106 112 -234 367 |  | ||||||
| -170 340 -232 438 -363 573 -36 37 -63 67 -60 67 4 0 27 -9 53 -20 25 -12 139 |  | ||||||
| -43 253 -70 115 -27 246 -58 293 -69 47 -11 122 -27 167 -35 65 -11 117 -32 |  | ||||||
| 250 -96 92 -45 192 -94 220 -108 29 -14 59 -33 68 -42 14 -15 8 -16 -68 -17 |  | ||||||
| -45 -1 -134 -2 -197 -3 -127 -3 -283 23 -366 59 -26 12 -52 21 -58 21 -19 0 |  | ||||||
| -130 77 -195 135 -67 60 -81 66 -81 34 0 -29 104 -129 182 -174 69 -41 195 |  | ||||||
| -85 303 -106 80 -16 480 -20 530 -5 l30 9 -30 14 -30 14 30 -6 c99 -21 159 |  | ||||||
| -17 298 19 149 38 249 77 321 122 l45 29 1 182 0 183 -113 -98 c-63 -53 -137 |  | ||||||
| -110 -166 -125 -133 -73 -289 -87 -426 -39 -90 31 -199 96 -192 115 2 7 35 15 |  | ||||||
| 80 19 139 13 270 39 395 80 127 41 299 117 365 162 21 14 42 26 47 26 6 0 10 |  | ||||||
| 86 10 218 0 215 0 217 -19 187 -17 -28 -97 -107 -166 -165 -105 -87 -270 -174 |  | ||||||
| -410 -216 -108 -32 -214 -56 -228 -51 -5 1 2 16 15 32 96 118 238 513 307 855 |  | ||||||
| 89 444 125 846 126 1405 0 395 -6 505 -51 866 -42 343 -63 459 -130 723 -63 |  | ||||||
| 243 -167 480 -309 701 -98 152 -120 194 -107 207 17 17 176 22 259 9 101 -16 |  | ||||||
| 160 -33 247 -73 189 -86 279 -179 415 -433 18 -33 37 -64 42 -70 5 -5 9 89 9 |  | ||||||
| 240 l-1 250 -77 69 c-151 134 -339 226 -560 271 -62 12 -115 25 -118 28 -4 3 |  | ||||||
| 2 30 12 59 16 46 19 83 18 278 -1 231 -12 334 -62 575 -71 338 -235 765 -393 |  | ||||||
| 1022 -23 38 -54 89 -70 115 -97 163 -237 350 -377 506 l-74 82 -177 0 -176 0 |  | ||||||
| 91 -92 c204 -208 333 -376 488 -631 186 -307 325 -643 405 -977 69 -291 80 |  | ||||||
| -381 56 -468 -10 -34 -23 -67 -31 -72 -9 -8 -80 -9 -234 -4 -266 8 -247 -1 |  | ||||||
| -340 171 -252 464 -543 835 -890 1133 -562 482 -1190 792 -1850 912 -121 22 |  | ||||||
| -145 23 -690 23 l-565 0 -120 -27 c-66 -15 -163 -36 -215 -47 -604 -133 -1184 |  | ||||||
| -448 -1625 -885 -276 -274 -477 -557 -660 -931 -74 -153 -135 -251 -208 -340 |  | ||||||
| -23 -28 -30 -30 -94 -31 -37 -1 -122 -5 -188 -9 -153 -10 -177 -1 -200 77 -45 |  | ||||||
| 151 0 415 140 835 162 483 398 878 756 1266 l91 97 -165 0 -164 0 -89 -93z |  | ||||||
| m3676 -162 c570 -67 1152 -279 1613 -586 465 -311 866 -729 1110 -1159 78 |  | ||||||
| -137 182 -371 217 -489 25 -82 48 -224 42 -257 -5 -30 -30 -40 -162 -68 -184 |  | ||||||
| -38 -221 -56 -315 -150 -73 -72 -89 -95 -143 -206 -87 -177 -126 -307 -178 |  | ||||||
| -598 -9 -50 -20 -95 -25 -98 -6 -3 -21 1 -34 10 -32 21 -288 130 -405 173 |  | ||||||
| -311 113 -759 239 -1045 293 -437 83 -625 101 -1115 107 -430 5 -548 0 -846 |  | ||||||
| -38 -568 -71 -1208 -279 -1583 -514 -57 -36 -107 -65 -111 -65 -4 0 -11 17 |  | ||||||
| -15 38 -24 135 -126 458 -183 583 -83 184 -243 382 -388 480 -39 26 -74 54 |  | ||||||
| -77 61 -5 13 19 80 70 203 14 33 50 121 80 195 284 706 734 1232 1389 1623 |  | ||||||
| 420 251 966 428 1449 470 122 11 541 6 655 -8z m-4078 -2636 c256 -52 479 |  | ||||||
| -221 607 -461 44 -81 99 -235 124 -343 57 -249 114 -543 147 -770 63 -417 58 |  | ||||||
| -1157 -11 -1695 -14 -112 -67 -419 -84 -489 -32 -134 -66 -252 -95 -331 -17 |  | ||||||
| -47 -39 -107 -49 -135 -26 -71 -120 -260 -164 -327 -20 -31 -53 -69 -72 -83 |  | ||||||
| -75 -58 -219 -101 -355 -105 -85 -3 -82 -7 -46 68 36 74 50 105 90 202 15 36 |  | ||||||
| 32 74 37 85 25 55 94 258 125 367 42 150 75 287 93 388 34 184 59 412 76 690 |  | ||||||
| 22 348 21 450 -16 930 -25 333 -111 861 -198 1215 -25 103 -101 327 -131 385 |  | ||||||
| -34 67 -108 179 -152 229 -76 88 -82 141 -17 173 18 10 35 18 36 18 1 0 26 -5 |  | ||||||
| 55 -11z m-491 -123 c299 -144 526 -765 614 -1681 45 -472 38 -1171 -16 -1595 |  | ||||||
| -66 -520 -179 -884 -368 -1194 -57 -94 -141 -196 -161 -196 -43 0 -215 142 |  | ||||||
| -313 258 -103 123 -219 414 -277 698 -129 623 -153 1928 -49 2609 75 495 180 |  | ||||||
| 842 308 1021 63 87 93 106 166 102 33 -2 76 -12 96 -22z m7908 -18 c25 -12 82 |  | ||||||
| -58 126 -102 194 -195 347 -570 454 -1111 58 -292 85 -524 106 -903 40 -715 |  | ||||||
| -2 -1279 -136 -1817 -78 -314 -216 -629 -307 -705 -57 -47 -150 -90 -195 -90 |  | ||||||
| -53 0 -122 36 -164 85 -46 53 -240 432 -305 595 -116 290 -148 590 -163 1520 |  | ||||||
| -18 1111 53 1735 250 2179 48 108 141 269 188 326 45 53 74 58 146 23z m-605 |  | ||||||
| -168 c30 -11 33 -32 10 -86 -143 -338 -218 -739 -254 -1349 -19 -324 -9 -1015 |  | ||||||
| 20 -1465 13 -193 15 -279 6 -287 -21 -22 -82 112 -121 266 -115 457 -123 1127 |  | ||||||
| -20 1782 15 94 25 199 25 262 0 113 13 177 85 432 56 202 96 305 152 398 36 |  | ||||||
| 59 49 66 97 47z m-2813 -365 c222 -23 294 -32 429 -55 656 -110 1118 -249 |  | ||||||
| 1549 -464 153 -77 210 -111 218 -133 3 -9 -3 -113 -15 -232 -12 -119 -27 -290 |  | ||||||
| -34 -381 -17 -228 -17 -856 0 -990 20 -163 59 -374 92 -500 17 -63 37 -167 45 |  | ||||||
| -230 8 -63 22 -138 30 -167 31 -106 152 -227 270 -272 33 -13 67 -26 75 -30 |  | ||||||
| 16 -8 294 -548 306 -595 l7 -29 -79 6 c-48 3 -133 22 -219 47 -613 182 -800 |  | ||||||
| 219 -1155 227 -242 6 -372 -6 -485 -46 -111 -39 -143 -80 -152 -195 -10 -127 |  | ||||||
| 32 -283 149 -561 189 -449 438 -737 743 -860 151 -61 228 -76 395 -79 179 -3 |  | ||||||
| 228 -15 270 -63 38 -43 40 -102 7 -229 -29 -107 -67 -185 -126 -256 -136 -162 |  | ||||||
| -311 -251 -546 -278 -441 -50 -865 95 -1184 403 -150 145 -244 310 -401 707 |  | ||||||
| -159 398 -271 599 -425 755 -82 84 -114 109 -179 141 -70 34 -90 39 -164 43 |  | ||||||
| -100 4 -167 -12 -189 -46 -30 -45 -66 -169 -198 -673 -83 -315 -163 -522 -276 |  | ||||||
| -710 -241 -406 -581 -631 -1089 -722 -46 -8 -148 -13 -265 -12 -160 0 -204 3 |  | ||||||
| -277 22 -197 49 -294 130 -347 289 -46 136 -15 274 74 336 22 15 111 52 197 |  | ||||||
| 82 286 99 398 165 573 339 147 147 244 288 322 466 111 254 162 475 142 616 |  | ||||||
| -13 96 -23 121 -62 162 -66 69 -159 26 -190 -89 -8 -30 -29 -130 -47 -224 -40 |  | ||||||
| -205 -75 -318 -142 -449 -108 -210 -243 -352 -453 -478 -109 -65 -161 -87 |  | ||||||
| -309 -128 -161 -44 -210 -68 -290 -141 -95 -86 -150 -235 -127 -339 16 -70 8 |  | ||||||
| -74 -132 -66 -302 17 -623 128 -771 267 -177 165 -178 393 -3 428 29 6 111 20 |  | ||||||
| 182 31 260 41 443 97 651 199 227 111 379 220 588 421 149 144 363 413 430 |  | ||||||
| 539 41 79 86 211 86 255 0 41 -32 91 -71 112 -53 27 -110 15 -265 -59 -76 -36 |  | ||||||
| -209 -93 -294 -127 -85 -33 -166 -65 -179 -71 -13 -5 -26 -7 -29 -5 -5 6 30 |  | ||||||
| 77 170 346 79 153 111 224 183 410 90 232 107 267 168 347 68 91 71 113 30 |  | ||||||
| 230 -32 93 -33 127 -19 818 9 461 4 630 -30 1060 -18 234 -19 249 -10 263 18 |  | ||||||
| 29 135 99 296 178 532 261 1153 421 1760 454 204 11 611 3 791 -15z m1629 |  | ||||||
| -4090 c183 -13 507 -96 635 -163 174 -92 443 -407 684 -802 136 -223 184 -291 |  | ||||||
| 265 -378 71 -75 128 -115 188 -133 18 -5 85 -14 148 -19 160 -14 170 -17 185 |  | ||||||
| -53 16 -39 6 -69 -38 -116 -104 -109 -427 -145 -776 -86 -88 15 -161 50 -185 |  | ||||||
| 88 -8 12 -16 60 -19 107 -11 166 -52 230 -192 295 -158 73 -181 104 -250 340 |  | ||||||
| -14 50 -46 153 -70 230 -24 77 -49 168 -55 202 -29 147 -158 205 -294 133 -57 |  | ||||||
| -29 -122 -77 -183 -134 -28 -26 -33 -37 -33 -77 0 -55 19 -104 57 -149 36 -43 |  | ||||||
| 79 -60 151 -60 47 0 73 7 140 40 l82 39 14 -22 c37 -56 134 -421 122 -453 -15 |  | ||||||
| -38 -228 -2 -340 57 -126 65 -291 222 -400 379 -123 175 -246 488 -246 625 0 |  | ||||||
| 73 6 83 54 96 42 12 214 28 251 23 11 -2 58 -5 105 -9z m-2122 -601 c116 -58 |  | ||||||
| 283 -291 390 -544 53 -126 152 -388 152 -403 0 -8 -10 -17 -22 -21 -30 -8 |  | ||||||
| -772 -8 -820 0 -20 4 -39 12 -42 19 -2 7 2 44 9 81 7 38 27 139 44 224 50 258 |  | ||||||
| 131 551 178 643 17 34 43 35 111 1z"/> |  | ||||||
| <path d="M3903 5575 c-345 -79 -564 -312 -613 -650 -35 -245 31 -497 178 -687 |  | ||||||
| 151 -195 345 -300 597 -324 145 -13 345 39 495 129 112 67 260 215 313 312 |  | ||||||
| 134 245 140 552 14 785 -109 202 -291 344 -532 415 -75 22 -114 27 -240 31 |  | ||||||
| -106 2 -168 -1 -212 -11z m8 -326 c127 -55 196 -232 138 -359 -58 -129 -200 |  | ||||||
| -188 -345 -145 -147 43 -214 213 -146 368 26 58 109 142 153 156 48 15 141 5 |  | ||||||
| 200 -20z"/> |  | ||||||
| <path d="M3696 5084 c-11 -30 -6 -53 20 -83 29 -35 59 -39 92 -12 30 26 30 74 |  | ||||||
| 0 95 -30 21 -104 21 -112 0z"/> |  | ||||||
| <path d="M6154 5579 c-119 -20 -277 -91 -375 -169 -124 -98 -216 -227 -268 |  | ||||||
| -375 -36 -104 -42 -313 -12 -440 66 -280 279 -530 531 -623 261 -96 558 -54 |  | ||||||
| 781 112 187 139 306 323 354 549 26 121 12 297 -34 418 -117 309 -471 542 |  | ||||||
| -820 538 -58 -1 -129 -5 -157 -10z m60 -313 c87 -51 140 -150 140 -261 -1 |  | ||||||
| -111 -53 -188 -151 -225 -180 -67 -352 50 -353 240 0 117 53 214 142 259 60 |  | ||||||
| 31 157 25 222 -13z"/> |  | ||||||
| <path d="M6016 5065 c-9 -9 -16 -23 -16 -32 0 -25 30 -81 47 -87 20 -8 56 23 |  | ||||||
| 48 41 -2 8 0 11 5 8 17 -11 11 32 -7 53 -25 29 -58 36 -77 17z m50 -53 c1 -7 |  | ||||||
| 0 -8 -3 -2 -2 5 -9 8 -14 4 -5 -3 -9 0 -9 6 0 15 23 7 26 -8z"/> |  | ||||||
| <path d="M10020 505 c24 -24 46 -42 49 -40 6 7 -70 85 -83 85 -6 0 10 -20 34 |  | ||||||
| -45z"/> |  | ||||||
| </g> |  | ||||||
| </svg> |  | ||||||
Image removed: 9.3 KiB.
Image removed: 302 KiB.
| @@ -1,66 +0,0 @@ | |||||||
| DROP TABLE IF EXISTS artist; |  | ||||||
| CREATE TABLE artist ( |  | ||||||
|     id TEXT PRIMARY KEY NOT NULL, |  | ||||||
|     name TEXT |  | ||||||
| ); |  | ||||||
|  |  | ||||||
| DROP TABLE IF EXISTS artist_release_group; |  | ||||||
| CREATE TABLE artist_release_group ( |  | ||||||
|     artist_id TEXT NOT NULL, |  | ||||||
|     release_group_id TEXT NOT NULL |  | ||||||
| ); |  | ||||||
|  |  | ||||||
| DROP TABLE IF EXISTS artist_track; |  | ||||||
| CREATE TABLE artist_track ( |  | ||||||
|     artist_id TEXT NOT NULL, |  | ||||||
|     track_id TEXT NOT NULL |  | ||||||
| ); |  | ||||||
|  |  | ||||||
| DROP TABLE IF EXISTS release_group; |  | ||||||
| CREATE TABLE release_group ( |  | ||||||
|     id TEXT PRIMARY KEY NOT NULL, |  | ||||||
|     albumartist TEXT, |  | ||||||
|     albumsort INT, |  | ||||||
|     musicbrainz_albumtype TEXT, |  | ||||||
|     compilation TEXT, |  | ||||||
|     album_artist_id TEXT |  | ||||||
| ); |  | ||||||
|  |  | ||||||
| DROP TABLE IF EXISTS release_; |  | ||||||
| CREATE TABLE release_ ( |  | ||||||
|     id TEXT PRIMARY KEY NOT NULL, |  | ||||||
|     release_group_id TEXT NOT NULL, |  | ||||||
|     title TEXT,  |  | ||||||
|     copyright TEXT, |  | ||||||
|     album_status TEXT, |  | ||||||
|     language TEXT, |  | ||||||
|     year TEXT, |  | ||||||
|     date TEXT, |  | ||||||
|     country TEXT, |  | ||||||
|     barcode TEXT |  | ||||||
| ); |  | ||||||
|  |  | ||||||
| DROP TABLE IF EXISTS track; |  | ||||||
| CREATE TABLE track ( |  | ||||||
|     id TEXT PRIMARY KEY NOT NULL, |  | ||||||
|     downloaded BOOLEAN NOT NULL DEFAULT 0, |  | ||||||
|     release_id TEXT NOT NULL, |  | ||||||
|     track TEXT, |  | ||||||
|     length INT, |  | ||||||
|     tracknumber TEXT, |  | ||||||
|     isrc TEXT, |  | ||||||
|     genre TEXT, |  | ||||||
|     lyrics TEXT, |  | ||||||
|     path TEXT, |  | ||||||
|     file TEXT, |  | ||||||
|     url TEXT, |  | ||||||
|     src TEXT |  | ||||||
| ); |  | ||||||
|  |  | ||||||
| DROP TABLE IF EXISTS source; |  | ||||||
| CREATE TABLE source ( |  | ||||||
|     track_id TEXT NOT NULL, |  | ||||||
|     src TEXT NOT NULL, |  | ||||||
|     url TEXT NOT NULL, |  | ||||||
|     valid BOOLEAN NOT NULL DEFAULT 1 |  | ||||||
| ); |  | ||||||
| @@ -6,8 +6,8 @@ logging.getLogger().setLevel(logging.DEBUG) | |||||||
|  |  | ||||||
| if __name__ == "__main__": | if __name__ == "__main__": | ||||||
|     commands = [ |     commands = [ | ||||||
|         "s: #a Ghost Bath", |         "s: #a Crystal F", | ||||||
|         "4", |         "d: 20", | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|      |      | ||||||
| @@ -2,91 +2,30 @@ import music_kraken | |||||||
| from music_kraken.objects import Song, Album, Artist, Collection | from music_kraken.objects import Song, Album, Artist, Collection | ||||||
|  |  | ||||||
| if __name__ == "__main__": | if __name__ == "__main__": | ||||||
|     artist: Artist = Artist( |     album_1 = Album( | ||||||
|         name="artist", |  | ||||||
|         main_album_list=[ |  | ||||||
|             Album( |  | ||||||
|         title="album", |         title="album", | ||||||
|         song_list=[ |         song_list=[ | ||||||
|                     Song( |             Song(title="song", main_artist_list=[Artist(name="artist")]), | ||||||
|                         title="song", |  | ||||||
|                         album_list=[ |  | ||||||
|                             Album( |  | ||||||
|                                 title="album",  |  | ||||||
|                                 albumsort=123, |  | ||||||
|                                 main_artist=Artist(name="artist"), |  | ||||||
|                             ), |  | ||||||
|         ], |         ], | ||||||
|                     ), |         artist_list=[ | ||||||
|                     Song( |             Artist(name="artist 3"), | ||||||
|                         title="other_song", |  | ||||||
|                         album_list=[ |  | ||||||
|                             Album(title="album", albumsort=423), |  | ||||||
|                         ], |  | ||||||
|                     ), |  | ||||||
|                 ] |  | ||||||
|             ), |  | ||||||
|             Album(title="album", barcode="1234567890123"), |  | ||||||
|         ] |         ] | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |     album_2 = Album( | ||||||
|     other_artist: Artist = Artist( |  | ||||||
|         name="artist", |  | ||||||
|         main_album_list=[ |  | ||||||
|             Album( |  | ||||||
|         title="album", |         title="album", | ||||||
|         song_list=[ |         song_list=[ | ||||||
|                     Song( |             Song(title="song", main_artist_list=[Artist(name="artist 2")]), | ||||||
|                         title="song", |  | ||||||
|                         album_list=[ |  | ||||||
|                             Album( |  | ||||||
|                                 title="album",  |  | ||||||
|                                 albumsort=123, |  | ||||||
|                                 main_artist=Artist(name="other_artist"), |  | ||||||
|                             ), |  | ||||||
|         ], |         ], | ||||||
|                     ), |         artist_list=[ | ||||||
|                     Song( |             Artist(name="artist"), | ||||||
|                         title="other_song", |  | ||||||
|                         album_list=[ |  | ||||||
|                             Album(title="album", albumsort=423), |  | ||||||
|                         ], |  | ||||||
|                     ), |  | ||||||
|                 ] |  | ||||||
|             ), |  | ||||||
|             Album(title="album", barcode="1234567890123"), |  | ||||||
|         ] |         ] | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     artist.merge(other_artist) |     album_1.merge(album_2) | ||||||
|  |  | ||||||
|     a = artist.main_album_collection[0] |  | ||||||
|     b = a.song_collection[0].album_collection[0] |  | ||||||
|     c = a.song_collection[1].album_collection[0] |  | ||||||
|     d = b.song_collection[0].album_collection[0] |  | ||||||
|     e = d.song_collection[0].album_collection[0] |  | ||||||
|     f = e.song_collection[0].album_collection[0] |  | ||||||
|     g = f.song_collection[0].album_collection[0] |  | ||||||
|  |  | ||||||
|     print(a.id, a.title, a.barcode, a.albumsort) |  | ||||||
|     print(b.id, b.title, b.barcode, b.albumsort) |  | ||||||
|     print(c.id, c.title, c.barcode, c.albumsort) |  | ||||||
|     print(d.id, d.title, d.barcode, d.albumsort) |  | ||||||
|     print(e.id, e.title, e.barcode, e.albumsort) |  | ||||||
|     print(f.id, f.title, f.barcode, f.albumsort) |  | ||||||
|     print(g.id, g.title, g.barcode, g.albumsort) |  | ||||||
|     print() |     print() | ||||||
|  |     print(*(f"{a.title_string} ; {a.id}" for a in album_1.artist_collection.data), sep=" | ") | ||||||
|  |  | ||||||
|     d.title = "new_title" |     print(id(album_1.artist_collection), id(album_2.artist_collection)) | ||||||
|  |     print(id(album_1.song_collection[0].main_artist_collection), id(album_2.song_collection[0].main_artist_collection)) | ||||||
|     print(a.id, a.title, a.barcode, a.albumsort) |  | ||||||
|     print(b.id, b.title, b.barcode, b.albumsort) |  | ||||||
|     print(c.id, c.title, c.barcode, c.albumsort) |  | ||||||
|     print(d.id, d.title, d.barcode, d.albumsort) |  | ||||||
|     print(e.id, e.title, e.barcode, e.albumsort) |  | ||||||
|     print(f.id, f.title, f.barcode, f.albumsort) |  | ||||||
|     print(g.id, g.title, g.barcode, g.albumsort) |  | ||||||
|     print() |  | ||||||
|  |  | ||||||
|     print(artist.main_album_collection._indexed_values) |  | ||||||
| @@ -13,6 +13,8 @@ from ..utils.support_classes.query import Query | |||||||
| from ..utils.support_classes.download_result import DownloadResult | from ..utils.support_classes.download_result import DownloadResult | ||||||
| from ..utils.exception.download import UrlNotFoundException | from ..utils.exception.download import UrlNotFoundException | ||||||
| from ..utils.enums.colors import BColors | from ..utils.enums.colors import BColors | ||||||
|  | from .. import console | ||||||
|  |  | ||||||
| from ..download.results import Results, Option, PageResults | from ..download.results import Results, Option, PageResults | ||||||
| from ..download.page_attributes import Pages | from ..download.page_attributes import Pages | ||||||
| from ..pages import Page | from ..pages import Page | ||||||
| @@ -174,12 +176,14 @@ class Downloader: | |||||||
|         page_count = 0 |         page_count = 0 | ||||||
|         for option in self.current_results.formated_generator(max_items_per_page=self.max_displayed_options): |         for option in self.current_results.formated_generator(max_items_per_page=self.max_displayed_options): | ||||||
|             if isinstance(option, Option): |             if isinstance(option, Option): | ||||||
|                 color = BColors.BOLD.value if self.pages.is_downloadable(option.music_object) else BColors.GREY.value |                 _downloadable = self.pages.is_downloadable(option.music_object) | ||||||
|                 print(f"{color}{option.index:0{self.option_digits}} {option.music_object.option_string}{BColors.ENDC.value}") |  | ||||||
|  |                 r = f"{BColors.GREY.value}{option.index:0{self.option_digits}}{BColors.ENDC.value} {option.music_object.option_string}" | ||||||
|  |                 print(r) | ||||||
|             else: |             else: | ||||||
|                 prefix = ALPHABET[page_count % len(ALPHABET)] |                 prefix = ALPHABET[page_count % len(ALPHABET)] | ||||||
|                 print( |                 print( | ||||||
|                     f"{BColors.HEADER.value}({prefix}) ------------------------{option.__name__:{PAGE_NAME_FILL}<{MAX_PAGE_LEN}}------------{BColors.ENDC.value}") |                     f"{BColors.HEADER.value}({prefix}) --------------------------------{option.__name__:{PAGE_NAME_FILL}<{MAX_PAGE_LEN}}--------------------{BColors.ENDC.value}") | ||||||
|  |  | ||||||
|                 self.page_dict[prefix] = option |                 self.page_dict[prefix] = option | ||||||
|                 self.page_dict[option.__name__] = option |                 self.page_dict[option.__name__] = option | ||||||
| @@ -211,6 +215,9 @@ class Downloader: | |||||||
|         return True |         return True | ||||||
|  |  | ||||||
|     def _process_parsed(self, key_text: Dict[str, str], query: str) -> Query: |     def _process_parsed(self, key_text: Dict[str, str], query: str) -> Query: | ||||||
|  |         # strip all the values in key_text | ||||||
|  |         key_text = {key: value.strip() for key, value in key_text.items()} | ||||||
|  |  | ||||||
|         song = None if not "t" in key_text else Song(title=key_text["t"], dynamic=True) |         song = None if not "t" in key_text else Song(title=key_text["t"], dynamic=True) | ||||||
|         album = None if not "r" in key_text else Album(title=key_text["r"], dynamic=True) |         album = None if not "r" in key_text else Album(title=key_text["r"], dynamic=True) | ||||||
|         artist = None if not "a" in key_text else Artist(name=key_text["a"], dynamic=True) |         artist = None if not "a" in key_text else Artist(name=key_text["a"], dynamic=True) | ||||||
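The new first line of `_process_parsed` strips whitespace from every parsed value before the `Song`/`Album`/`Artist` objects are built. A tiny illustration of what that normalization does (the sample dictionary is invented):

```python
# Parsed key -> text mapping as _process_parsed might receive it (invented values).
key_text = {"a": "  some artist ", "r": " some album  "}

# The normalization step added in the diff above.
key_text = {key: value.strip() for key, value in key_text.items()}

print(key_text)  # {'a': 'some artist', 'r': 'some album'}
```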
| @@ -1,12 +1,13 @@ | |||||||
| import json | import json | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| from dataclasses import dataclass | from dataclasses import dataclass, field | ||||||
| from datetime import datetime, timedelta | from datetime import datetime, timedelta | ||||||
| from typing import List, Optional | from typing import List, Optional | ||||||
| from functools import lru_cache | from functools import lru_cache | ||||||
| import logging | import logging | ||||||
|  |  | ||||||
| from ..utils.config import main_settings | from ..utils.config import main_settings | ||||||
|  | from ..utils.string_processing import fit_to_file_system | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass | @dataclass | ||||||
| @@ -17,6 +18,8 @@ class CacheAttribute: | |||||||
|     created: datetime |     created: datetime | ||||||
|     expires: datetime |     expires: datetime | ||||||
|  |  | ||||||
|  |     additional_info: dict = field(default_factory=dict) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def id(self): |     def id(self): | ||||||
|         return f"{self.module}_{self.name}" |         return f"{self.module}_{self.name}" | ||||||
| @@ -31,6 +34,12 @@ class CacheAttribute: | |||||||
|         return self.__dict__ == other.__dict__ |         return self.__dict__ == other.__dict__ | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @dataclass | ||||||
|  | class CacheResult: | ||||||
|  |     content: bytes | ||||||
|  |     attribute: CacheAttribute | ||||||
|  |  | ||||||
|  |  | ||||||
| class Cache: | class Cache: | ||||||
|     def __init__(self, module: str, logger: logging.Logger): |     def __init__(self, module: str, logger: logging.Logger): | ||||||
|         self.module = module |         self.module = module | ||||||
| @@ -48,6 +57,7 @@ class Cache: | |||||||
|  |  | ||||||
|         self._time_fields = {"created", "expires"} |         self._time_fields = {"created", "expires"} | ||||||
|         with self.index.open("r") as i: |         with self.index.open("r") as i: | ||||||
|  |             try: | ||||||
|                 for c in json.loads(i.read()): |                 for c in json.loads(i.read()): | ||||||
|                     for key in self._time_fields: |                     for key in self._time_fields: | ||||||
|                         c[key] = datetime.fromisoformat(c[key]) |                         c[key] = datetime.fromisoformat(c[key]) | ||||||
| @@ -55,6 +65,8 @@ class Cache: | |||||||
|                     ca = CacheAttribute(**c) |                     ca = CacheAttribute(**c) | ||||||
|                     self.cached_attributes.append(ca) |                     self.cached_attributes.append(ca) | ||||||
|                     self._id_to_attribute[ca.id] = ca |                     self._id_to_attribute[ca.id] = ca | ||||||
|  |             except json.JSONDecodeError: | ||||||
|  |                 pass | ||||||
|  |  | ||||||
|     @lru_cache() |     @lru_cache() | ||||||
|     def _init_module(self, module: str) -> Path: |     def _init_module(self, module: str) -> Path: | ||||||
| @@ -63,7 +75,7 @@ class Cache: | |||||||
|         :return: the module path |         :return: the module path | ||||||
|         """ |         """ | ||||||
|         r = Path(self._dir, module) |         r = Path(self._dir, module) | ||||||
|         r.mkdir(exist_ok=True) |         r.mkdir(exist_ok=True, parents=True) | ||||||
|         return r |         return r | ||||||
|  |  | ||||||
|     def _write_index(self, indent: int = 4): |     def _write_index(self, indent: int = 4): | ||||||
| @@ -99,7 +111,7 @@ class Cache: | |||||||
|  |  | ||||||
|         return True |         return True | ||||||
|  |  | ||||||
|     def set(self, content: bytes, name: str, expires_in: float = 10, module: str = ""): |     def set(self, content: bytes, name: str, expires_in: float = 10, module: str = "", additional_info: dict = None): | ||||||
|         """ |         """ | ||||||
|         :param content: |         :param content: | ||||||
|         :param module: |         :param module: | ||||||
| @@ -110,6 +122,7 @@ class Cache: | |||||||
|         if name == "": |         if name == "": | ||||||
|             return |             return | ||||||
|  |  | ||||||
|  |         additional_info = additional_info or {} | ||||||
|         module = self.module if module == "" else module |         module = self.module if module == "" else module | ||||||
|  |  | ||||||
|         module_path = self._init_module(module) |         module_path = self._init_module(module) | ||||||
| @@ -119,16 +132,17 @@ class Cache: | |||||||
|             name=name, |             name=name, | ||||||
|             created=datetime.now(), |             created=datetime.now(), | ||||||
|             expires=datetime.now() + timedelta(days=expires_in), |             expires=datetime.now() + timedelta(days=expires_in), | ||||||
|  |             additional_info=additional_info, | ||||||
|         ) |         ) | ||||||
|         self._write_attribute(cache_attribute) |         self._write_attribute(cache_attribute) | ||||||
|  |  | ||||||
|         cache_path = Path(module_path, name) |         cache_path = fit_to_file_system(Path(module_path, name), hidden_ok=True) | ||||||
|         with cache_path.open("wb") as content_file: |         with cache_path.open("wb") as content_file: | ||||||
|             self.logger.debug(f"writing cache to {cache_path}") |             self.logger.debug(f"writing cache to {cache_path}") | ||||||
|             content_file.write(content) |             content_file.write(content) | ||||||
|  |  | ||||||
|     def get(self, name: str) -> Optional[bytes]: |     def get(self, name: str) -> Optional[CacheResult]: | ||||||
|         path = Path(self._dir, self.module, name) |         path = fit_to_file_system(Path(self._dir, self.module, name), hidden_ok=True) | ||||||
|  |  | ||||||
|         if not path.is_file(): |         if not path.is_file(): | ||||||
|             return None |             return None | ||||||
| @@ -139,7 +153,7 @@ class Cache: | |||||||
|             return |             return | ||||||
|  |  | ||||||
|         with path.open("rb") as f: |         with path.open("rb") as f: | ||||||
|             return f.read() |             return CacheResult(content=f.read(), attribute=existing_attribute) | ||||||
|  |  | ||||||
|     def clean(self): |     def clean(self): | ||||||
|         keep = set() |         keep = set() | ||||||
| @@ -148,7 +162,7 @@ class Cache: | |||||||
|             if ca.name == "": |             if ca.name == "": | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             file = Path(self._dir, ca.module, ca.name) |             file = fit_to_file_system(Path(self._dir, ca.module, ca.name), hidden_ok=True) | ||||||
|  |  | ||||||
|             if not ca.is_valid: |             if not ca.is_valid: | ||||||
|                 self.logger.debug(f"deleting cache {ca.id}") |                 self.logger.debug(f"deleting cache {ca.id}") | ||||||
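With these changes, `Cache.get()` returns a `CacheResult` carrying both the stored bytes and the `CacheAttribute` (including the new `additional_info` dict) instead of raw bytes. A minimal usage sketch, assuming the constructor and method signatures exactly as they appear in the diff above; the import path, module name, and cache entry name are placeholders:

```python
import logging
from music_kraken.connection.cache import Cache  # assumed import path

cache = Cache(module="example_module", logger=logging.getLogger("example"))

# Store a payload together with metadata, the way Connection.save() now does.
cache.set(
    b"<html>...</html>",
    name="landing_page",
    expires_in=1,                           # days until the entry expires
    additional_info={"encoding": "utf-8"},  # stored on the CacheAttribute
)

# get() now returns a CacheResult (or None), not raw bytes.
result = cache.get("landing_page")
if result is not None:
    encoding = result.attribute.additional_info.get("encoding", "utf-8")
    print(result.content.decode(encoding))
```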
| @@ -15,6 +15,8 @@ from tqdm import tqdm | |||||||
| from .cache import Cache | from .cache import Cache | ||||||
| from .rotating import RotatingProxy | from .rotating import RotatingProxy | ||||||
| from ..objects import Target | from ..objects import Target | ||||||
|  | from ..utils import request_trace | ||||||
|  | from ..utils.string_processing import shorten_display_url | ||||||
| from ..utils.config import main_settings | from ..utils.config import main_settings | ||||||
| from ..utils.support_classes.download_result import DownloadResult | from ..utils.support_classes.download_result import DownloadResult | ||||||
| from ..utils.hacking import merge_args | from ..utils.hacking import merge_args | ||||||
| @@ -123,12 +125,17 @@ class Connection: | |||||||
|  |  | ||||||
|         return headers |         return headers | ||||||
|  |  | ||||||
|     def save(self, r: requests.Response, name: str, error: bool = False, **kwargs): |     def save(self, r: requests.Response, name: str, error: bool = False, no_update_if_valid_exists: bool = False, **kwargs): | ||||||
|         n_kwargs = {} |         n_kwargs = {} | ||||||
|         if error: |         if error: | ||||||
|             n_kwargs["module"] = "failed_requests" |             n_kwargs["module"] = "failed_requests" | ||||||
|  |  | ||||||
|         self.cache.set(r.content, name, expires_in=kwargs.get("expires_in", self.cache_expiring_duration), **n_kwargs) |         if self.cache.get(name) is not None and no_update_if_valid_exists: | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         self.cache.set(r.content, name, expires_in=kwargs.get("expires_in", self.cache_expiring_duration), additional_info={ | ||||||
|  |             "encoding": r.encoding, | ||||||
|  |         }, **n_kwargs) | ||||||
|  |  | ||||||
|     def request( |     def request( | ||||||
|             self, |             self, | ||||||
| @@ -143,6 +150,7 @@ class Connection: | |||||||
|             sleep_after_404: float = None, |             sleep_after_404: float = None, | ||||||
|             is_heartbeat: bool = False, |             is_heartbeat: bool = False, | ||||||
|             disable_cache: bool = None, |             disable_cache: bool = None, | ||||||
|  |             enable_cache_readonly: bool = False, | ||||||
|             method: str = None, |             method: str = None, | ||||||
|             name: str = "", |             name: str = "", | ||||||
|             exclude_headers: List[str] = None, |             exclude_headers: List[str] = None, | ||||||
| @@ -152,7 +160,7 @@ class Connection: | |||||||
|             raise AttributeError("method is not set.") |             raise AttributeError("method is not set.") | ||||||
|         method = method.upper() |         method = method.upper() | ||||||
|         headers = dict() if headers is None else headers |         headers = dict() if headers is None else headers | ||||||
|         disable_cache = headers.get("Cache-Control", "").lower() == "no-cache" if disable_cache is None else disable_cache |         disable_cache = (headers.get("Cache-Control", "").lower() == "no-cache" if disable_cache is None else disable_cache) or kwargs.get("stream", False) | ||||||
|         accepted_response_codes = self.ACCEPTED_RESPONSE_CODES if accepted_response_codes is None else accepted_response_codes |         accepted_response_codes = self.ACCEPTED_RESPONSE_CODES if accepted_response_codes is None else accepted_response_codes | ||||||
|          |          | ||||||
|         current_kwargs = copy.copy(locals()) |         current_kwargs = copy.copy(locals()) | ||||||
| @@ -160,6 +168,7 @@ class Connection: | |||||||
|         current_kwargs.update(**kwargs) |         current_kwargs.update(**kwargs) | ||||||
|  |  | ||||||
|         parsed_url = urlparse(url) |         parsed_url = urlparse(url) | ||||||
|  |         trace_string = f"{method} {shorten_display_url(url)} \t{'[stream]' if kwargs.get('stream', False) else ''}" | ||||||
|          |          | ||||||
|         if not raw_headers: |         if not raw_headers: | ||||||
|             _headers = copy.copy(self.HEADER_VALUES) |             _headers = copy.copy(self.HEADER_VALUES) | ||||||
| @@ -175,15 +184,23 @@ class Connection: | |||||||
|  |  | ||||||
|         request_url = parsed_url.geturl() if not raw_url else url |         request_url = parsed_url.geturl() if not raw_url else url | ||||||
|  |  | ||||||
|         if name != "" and not disable_cache: |         if name != "" and (not disable_cache or enable_cache_readonly): | ||||||
|             cached = self.cache.get(name) |             cached = self.cache.get(name) | ||||||
|  |  | ||||||
|             if cached is not None: |             if cached is not None: | ||||||
|  |                 request_trace(f"{trace_string}\t[cached]") | ||||||
|  |  | ||||||
|                 with responses.RequestsMock() as resp: |                 with responses.RequestsMock() as resp: | ||||||
|  |                     additional_info = cached.attribute.additional_info | ||||||
|  |  | ||||||
|  |                     body = cached.content | ||||||
|  |                     if "encoding" in additional_info: | ||||||
|  |                         body = body.decode(additional_info["encoding"]) | ||||||
|  |  | ||||||
|                     resp.add( |                     resp.add( | ||||||
|                         method=method, |                         method=method, | ||||||
|                         url=request_url, |                         url=request_url, | ||||||
|                         body=cached, |                         body=body, | ||||||
|                     ) |                     ) | ||||||
|                     return requests.request(method=method, url=url, timeout=timeout, headers=headers, **kwargs) |                     return requests.request(method=method, url=url, timeout=timeout, headers=headers, **kwargs) | ||||||
|  |  | ||||||
| @@ -199,6 +216,9 @@ class Connection: | |||||||
|             if header in headers: |             if header in headers: | ||||||
|                 del headers[header] |                 del headers[header] | ||||||
|  |  | ||||||
|  |         if try_count <= 0: | ||||||
|  |             request_trace(trace_string) | ||||||
|  |  | ||||||
|         r = None |         r = None | ||||||
|         connection_failed = False |         connection_failed = False | ||||||
|         try: |         try: | ||||||
| @@ -208,7 +228,7 @@ class Connection: | |||||||
|                     pass |                     pass | ||||||
|              |              | ||||||
|             self.lock = True |             self.lock = True | ||||||
|             r: requests.Response = requests.request(method=method, url=url, timeout=timeout, headers=headers, **kwargs) |             r: requests.Response = self.session.request(method=method, url=url, timeout=timeout, headers=headers, **kwargs) | ||||||
|  |  | ||||||
|             if r.status_code in accepted_response_codes: |             if r.status_code in accepted_response_codes: | ||||||
|                 if not disable_cache: |                 if not disable_cache: | ||||||
| @@ -228,10 +248,10 @@ class Connection: | |||||||
|             self.lock = False |             self.lock = False | ||||||
|  |  | ||||||
|         if r is None: |         if r is None: | ||||||
|             self.LOGGER.warning(f"{self.HOST.netloc} didn't respond at {url}. ({try_count}-{self.TRIES})") |             self.LOGGER.warning(f"{parsed_url.netloc} didn't respond at {url}. ({try_count}-{self.TRIES})") | ||||||
|             self.LOGGER.debug("request headers:\n\t"+ "\n\t".join(f"{k}\t=\t{v}" for k, v in headers.items())) |             self.LOGGER.debug("request headers:\n\t"+ "\n\t".join(f"{k}\t=\t{v}" for k, v in headers.items())) | ||||||
|         else: |         else: | ||||||
|             self.LOGGER.warning(f"{self.HOST.netloc} responded wit {r.status_code} at {url}. ({try_count}-{self.TRIES})") |             self.LOGGER.warning(f"{parsed_url.netloc} responded wit {r.status_code} at {url}. ({try_count}-{self.TRIES})") | ||||||
|             self.LOGGER.debug("request headers:\n\t"+ "\n\t".join(f"{k}\t=\t{v}" for k, v in r.request.headers.items())) |             self.LOGGER.debug("request headers:\n\t"+ "\n\t".join(f"{k}\t=\t{v}" for k, v in r.request.headers.items())) | ||||||
|             self.LOGGER.debug("response headers:\n\t"+ "\n\t".join(f"{k}\t=\t{v}" for k, v in r.headers.items())) |             self.LOGGER.debug("response headers:\n\t"+ "\n\t".join(f"{k}\t=\t{v}" for k, v in r.headers.items())) | ||||||
|             self.LOGGER.debug(r.content) |             self.LOGGER.debug(r.content) | ||||||
|   | |||||||
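The cached branch above serves stored bodies by registering them with the `responses` library and then issuing the real `requests` call against that mock, so callers always receive a genuine `requests.Response`. A standalone sketch of that pattern (not this repo's code; the URL and payload are made up):

```python
# Replay a cached payload as a real requests.Response via responses.RequestsMock.
import requests
import responses


def replay_cached(method: str, url: str, body: bytes, encoding: str = None) -> requests.Response:
    # decode with the stored encoding so the mocked body matches the original response
    payload = body.decode(encoding) if encoding else body

    with responses.RequestsMock() as mock:
        mock.add(method=method, url=url, body=payload)
        return requests.request(method=method, url=url)


r = replay_cached("GET", "https://example.com/", b"hello", "utf-8")
print(r.text)  # hello
```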
| @@ -14,7 +14,7 @@ from ..pages import Page, EncyclopaediaMetallum, Musify, YouTube, YoutubeMusic, | |||||||
|  |  | ||||||
|  |  | ||||||
| ALL_PAGES: Set[Type[Page]] = { | ALL_PAGES: Set[Type[Page]] = { | ||||||
|     EncyclopaediaMetallum, |     # EncyclopaediaMetallum, | ||||||
|     Musify, |     Musify, | ||||||
|     YoutubeMusic, |     YoutubeMusic, | ||||||
|     Bandcamp |     Bandcamp | ||||||
|   | |||||||
| @@ -1,110 +0,0 @@ | |||||||
| from collections import defaultdict |  | ||||||
| from typing import Dict, List, Optional |  | ||||||
| import weakref |  | ||||||
|  |  | ||||||
| from .parents import DatabaseObject |  | ||||||
|  |  | ||||||
| """ |  | ||||||
| This is a cache for the objects that get pulled out of the database. |  | ||||||
| This is necessary to avoid duplicate objects with the same id. |  | ||||||
|  |  | ||||||
| Using a cache that maps the objects to their id has multiple benefits: |  | ||||||
|  - if you modify the object at any point, all objects with the same id get modified *(copy by reference)* |  | ||||||
|  - less RAM usage |  | ||||||
|  - to further decrease RAM usage, only weak references are stored (not strong ones), so the gc still works |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ObjectCache: |  | ||||||
|     """ |  | ||||||
|     ObjectCache is a cache for the objects retrieved from a database. |  | ||||||
|     It maps each object to its id and uses weak references to manage its memory usage. |  | ||||||
|     Using a cache for these objects provides several benefits: |  | ||||||
|  |  | ||||||
|     - Modifying an object updates all objects with the same id (due to copy by reference) |  | ||||||
|     - Reduced memory usage |  | ||||||
|  |  | ||||||
|     :attr object_to_id: Dictionary that maps DatabaseObjects to their id. |  | ||||||
|     :attr weakref_map: Dictionary that uses weak references to DatabaseObjects as keys and their id as values. |  | ||||||
|  |  | ||||||
|     :method exists: Check if a DatabaseObject already exists in the cache. |  | ||||||
|     :method append: Add a DatabaseObject to the cache if it does not already exist. |  | ||||||
|     :method extent: Add a list of DatabaseObjects to the cache. |  | ||||||
|     :method remove: Remove a DatabaseObject from the cache by its id. |  | ||||||
|     :method get: Retrieve a DatabaseObject from the cache by its id. |  | ||||||
|     """ |  | ||||||
|     object_to_id: Dict[str, DatabaseObject] |  | ||||||
|     weakref_map: Dict[weakref.ref, str] |  | ||||||
|  |  | ||||||
|     def __init__(self) -> None: |  | ||||||
|         self.object_to_id = dict() |  | ||||||
|         self.weakref_map = defaultdict() |  | ||||||
|  |  | ||||||
|     def exists(self, database_object: DatabaseObject) -> bool: |  | ||||||
|         """ |  | ||||||
|         Check if a DatabaseObject with the same id already exists in the cache. |  | ||||||
|  |  | ||||||
|         :param database_object: The DatabaseObject to check for. |  | ||||||
|         :return: True if the DatabaseObject exists, False otherwise. |  | ||||||
|         """ |  | ||||||
|         if database_object.dynamic: |  | ||||||
|             return True |  | ||||||
|         return database_object.id in self.object_to_id |  | ||||||
|  |  | ||||||
|     def on_death(self, weakref_: weakref.ref) -> None: |  | ||||||
|         """ |  | ||||||
|         Callback function that gets triggered when the reference count of a DatabaseObject drops to 0. |  | ||||||
|         This function removes the DatabaseObject from the cache. |  | ||||||
|  |  | ||||||
|         :param weakref_: The weak reference of the DatabaseObject that has been garbage collected. |  | ||||||
|         """ |  | ||||||
|         data_id = self.weakref_map.pop(weakref_) |  | ||||||
|         self.object_to_id.pop(data_id) |  | ||||||
|          |  | ||||||
|     def get_weakref(self, database_object: DatabaseObject) -> weakref.ref: |  | ||||||
|         return weakref.ref(database_object, self.on_death) |  | ||||||
|          |  | ||||||
|  |  | ||||||
|     def append(self, database_object: DatabaseObject) -> bool: |  | ||||||
|         """ |  | ||||||
|         Add a DatabaseObject to the cache. |  | ||||||
|  |  | ||||||
|         :param database_object: The DatabaseObject to add to the cache. |  | ||||||
|         :return: True if the DatabaseObject already exists in the cache, False otherwise. |  | ||||||
|         """ |  | ||||||
|         if self.exists(database_object): |  | ||||||
|             return True |  | ||||||
|  |  | ||||||
|         self.weakref_map[weakref.ref(database_object, self.on_death)] = database_object.id |  | ||||||
|         self.object_to_id[database_object.id] = database_object |  | ||||||
|  |  | ||||||
|         return False |  | ||||||
|  |  | ||||||
|     def extent(self, database_object_list: List[DatabaseObject]): |  | ||||||
|         """ |  | ||||||
|         Analogous to list.extend, this appends every DatabaseObject in the given list. |  | ||||||
|         """ |  | ||||||
|         for database_object in database_object_list: |  | ||||||
|             self.append(database_object) |  | ||||||
|  |  | ||||||
|     def remove(self, _id: str): |  | ||||||
|         """ |  | ||||||
|         Remove a DatabaseObject from the cache. |  | ||||||
|  |  | ||||||
|         :param _id: The id of the DatabaseObject to remove from the cache. |  | ||||||
|         """ |  | ||||||
|         data = self.object_to_id.get(_id) |  | ||||||
|         if data: |  | ||||||
|             self.weakref_map.pop(weakref.ref(data)) |  | ||||||
|             self.object_to_id.pop(_id) |  | ||||||
|  |  | ||||||
|     def __getitem__(self, item) -> Optional[DatabaseObject]: |  | ||||||
|         """ |  | ||||||
|         Return the cached DatabaseObject for the given id. |  | ||||||
|         :param item: the id of the music object |  | ||||||
|         :return: the cached object, or None if it is not present |  | ||||||
|         """ |  | ||||||
|  |  | ||||||
|         return self.object_to_id.get(item) |  | ||||||
|  |  | ||||||
|     def get(self, _id: str) -> Optional[DatabaseObject]: |  | ||||||
|         return self.__getitem__(_id) |  | ||||||
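The deleted ObjectCache relied on `weakref.ref` with a death callback so entries evict themselves once the last strong reference to an object disappears; here is that mechanism in isolation, reduced to a dictionary keyed by weak references:

```python
import gc
import weakref

cache = {}  # weakref -> id, mirroring weakref_map above


def on_death(ref):
    # fires when the referent has been garbage collected; drop the stale entry
    cache.pop(ref, None)


class Dummy:
    pass


obj = Dummy()
cache[weakref.ref(obj, on_death)] = 42
print(len(cache))  # 1

del obj
gc.collect()       # make the collection explicit; CPython usually frees immediately
print(len(cache))  # 0
```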
| @@ -1,8 +1,9 @@ | |||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
|  |  | ||||||
| from collections import defaultdict | from collections import defaultdict | ||||||
| from typing import TypeVar, Generic, Dict, Optional, Iterable, List, Iterator, Tuple, Generator, Union | from typing import TypeVar, Generic, Dict, Optional, Iterable, List, Iterator, Tuple, Generator, Union, Any | ||||||
| from .parents import OuterProxy | from .parents import OuterProxy | ||||||
|  | from ..utils import object_trace | ||||||
|  |  | ||||||
| T = TypeVar('T', bound=OuterProxy) | T = TypeVar('T', bound=OuterProxy) | ||||||
|  |  | ||||||
| @@ -21,186 +22,62 @@ class Collection(Generic[T]): | |||||||
|             self, |             self, | ||||||
|             data: Optional[Iterable[T]] = None, |             data: Optional[Iterable[T]] = None, | ||||||
|             sync_on_append: Dict[str, Collection] = None, |             sync_on_append: Dict[str, Collection] = None, | ||||||
|             contain_given_in_attribute: Dict[str, Collection] = None, |             append_object_to_attribute: Dict[str, T] = None, | ||||||
|             contain_attribute_in_given: Dict[str, Collection] = None, |             extend_object_to_attribute: Dict[str, Collection] = None, | ||||||
|             append_object_to_attribute: Dict[str, T] = None |  | ||||||
|     ) -> None: |     ) -> None: | ||||||
|  |         self._collection_for: dict = dict() | ||||||
|  |  | ||||||
|         self._contains_ids = set() |         self._contains_ids = set() | ||||||
|         self._data = [] |         self._data = [] | ||||||
|  |  | ||||||
|         self.parents: List[Collection[T]] = [] |  | ||||||
|         self.children: List[Collection[T]] = [] |  | ||||||
|  |  | ||||||
|         # List of collection attributes that should be modified on append |         # List of collection attributes that should be modified on append | ||||||
|         # Key: collection attribute (str) of appended element |         # Key: collection attribute (str) of appended element | ||||||
|         # Value: main collection to sync to |         # Value: main collection to sync to | ||||||
|         self.contain_given_in_attribute: Dict[str, Collection] = contain_given_in_attribute or {} |  | ||||||
|         self.append_object_to_attribute: Dict[str, T] = append_object_to_attribute or {} |         self.append_object_to_attribute: Dict[str, T] = append_object_to_attribute or {} | ||||||
|  |         self.extend_object_to_attribute: Dict[str, Collection[T]] = extend_object_to_attribute or {} | ||||||
|         self.sync_on_append: Dict[str, Collection] = sync_on_append or {} |         self.sync_on_append: Dict[str, Collection] = sync_on_append or {} | ||||||
|  |  | ||||||
|         self._id_to_index_values: Dict[int, set] = defaultdict(set) |         self._id_to_index_values: Dict[int, set] = defaultdict(set) | ||||||
|         self._indexed_values = defaultdict(lambda: None) |          | ||||||
|         self._indexed_to_objects = defaultdict(lambda: None) |         # This is to cleanly unmap previously mapped items by their id | ||||||
|  |         self._indexed_from_id: Dict[int, Dict[str, Any]] = defaultdict(dict) | ||||||
|  |         # this is to keep track of and look up the actual objects | ||||||
|  |         self._indexed_values: Dict[str, Dict[Any, T]] = defaultdict(dict) | ||||||
|  |  | ||||||
|         self.extend(data) |         self.extend(data) | ||||||
|  |  | ||||||
|     def _map_element(self, __object: T, from_map: bool = False): |     def __repr__(self) -> str: | ||||||
|         self._contains_ids.add(__object.id) |         return f"Collection({id(self)})" | ||||||
|  |  | ||||||
|         for name, value in (*__object.indexing_values, ('id', __object.id)): |     def _map_element(self, __object: T, from_map: bool = False): | ||||||
|  |         self._unmap_element(__object.id) | ||||||
|  |  | ||||||
|  |         self._indexed_from_id[__object.id]["id"] = __object.id | ||||||
|  |         self._indexed_values["id"][__object.id] = __object | ||||||
|  |  | ||||||
|  |         for name, value in __object.indexing_values: | ||||||
|             if value is None or value == __object._inner._default_values.get(name): |             if value is None or value == __object._inner._default_values.get(name): | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             self._indexed_values[name] = value |             self._indexed_values[name][value] = __object | ||||||
|             self._indexed_to_objects[value] = __object |             self._indexed_from_id[__object.id][name] = value | ||||||
|  |  | ||||||
|             self._id_to_index_values[__object.id].add((name, value)) |  | ||||||
|  |  | ||||||
|     def _unmap_element(self, __object: Union[T, int]): |     def _unmap_element(self, __object: Union[T, int]): | ||||||
|         obj_id = __object.id if isinstance(__object, OuterProxy) else __object |         obj_id = __object.id if isinstance(__object, OuterProxy) else __object | ||||||
|  |  | ||||||
|         if obj_id in self._contains_ids: |         if obj_id not in self._indexed_from_id: | ||||||
|             self._contains_ids.remove(obj_id) |  | ||||||
|  |  | ||||||
|         for name, value in self._id_to_index_values[obj_id]: |  | ||||||
|             if name in self._indexed_values: |  | ||||||
|                 del self._indexed_values[name] |  | ||||||
|             if value in self._indexed_to_objects: |  | ||||||
|                 del self._indexed_to_objects[value] |  | ||||||
|  |  | ||||||
|         del self._id_to_index_values[obj_id] |  | ||||||
|  |  | ||||||
|     def _contained_in_self(self, __object: T) -> bool: |  | ||||||
|         if __object.id in self._contains_ids: |  | ||||||
|             return True |  | ||||||
|  |  | ||||||
|         for name, value in __object.indexing_values: |  | ||||||
|             if value is None: |  | ||||||
|                 continue |  | ||||||
|             if value == self._indexed_values[name]: |  | ||||||
|                 return True |  | ||||||
|         return False |  | ||||||
|  |  | ||||||
|     def _contained_in_sub(self, __object: T, break_at_first: bool = True) -> List[Collection]: |  | ||||||
|         """ |  | ||||||
|         Gets the collection this object is found in, if it is found in any. |  | ||||||
|  |  | ||||||
|         :param __object: |  | ||||||
|         :param break_at_first: |  | ||||||
|         :return: |  | ||||||
|         """ |  | ||||||
|         results = [] |  | ||||||
|  |  | ||||||
|         if self._contained_in_self(__object): |  | ||||||
|             return [self] |  | ||||||
|  |  | ||||||
|         for collection in self.children: |  | ||||||
|             results.extend(collection._contained_in_sub(__object, break_at_first=break_at_first)) |  | ||||||
|  |  | ||||||
|             if break_at_first: |  | ||||||
|                 return results |  | ||||||
|  |  | ||||||
|         return results |  | ||||||
|  |  | ||||||
|     def _get_root_collections(self) -> List[Collection]: |  | ||||||
|         if not len(self.parents): |  | ||||||
|             return [self] |  | ||||||
|  |  | ||||||
|         root_collections = [] |  | ||||||
|         for upper_collection in self.parents: |  | ||||||
|             root_collections.extend(upper_collection._get_root_collections()) |  | ||||||
|         return root_collections |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def _is_root(self) -> bool: |  | ||||||
|         return len(self.parents) <= 0 |  | ||||||
|  |  | ||||||
|     def _get_parents_of_multiple_contained_children(self, __object: T): |  | ||||||
|         results = [] |  | ||||||
|         if len(self.children) < 2 or self._contained_in_self(__object): |  | ||||||
|             return results |  | ||||||
|  |  | ||||||
|         count = 0 |  | ||||||
|  |  | ||||||
|         for collection in self.children: |  | ||||||
|             sub_results = collection._get_parents_of_multiple_contained_children(__object) |  | ||||||
|  |  | ||||||
|             if len(sub_results) > 0: |  | ||||||
|                 count += 1 |  | ||||||
|                 results.extend(sub_results) |  | ||||||
|  |  | ||||||
|         if count >= 2: |  | ||||||
|             results.append(self) |  | ||||||
|  |  | ||||||
|         return results |  | ||||||
|  |  | ||||||
|     def merge_into_self(self, __object: T, from_map: bool = False): |  | ||||||
|         """ |  | ||||||
|         1. find existing objects |  | ||||||
|         2. merge into existing object |  | ||||||
|         3. remap existing object |  | ||||||
|         """ |  | ||||||
|         if __object.id in self._contains_ids: |  | ||||||
|             return |             return | ||||||
|  |  | ||||||
|         existing_object: T = None |         for name, value in self._indexed_from_id[obj_id].items(): | ||||||
|  |             if value in self._indexed_values[name]: | ||||||
|  |                 del self._indexed_values[name][value] | ||||||
|  |  | ||||||
|  |         del self._indexed_from_id[obj_id] | ||||||
|  |  | ||||||
|  |     def _find_object(self, __object: T) -> Optional[T]: | ||||||
|         for name, value in __object.indexing_values: |         for name, value in __object.indexing_values: | ||||||
|             if value is None: |             if value in self._indexed_values[name]: | ||||||
|                 continue |                 return self._indexed_values[name][value] | ||||||
|  |  | ||||||
|             if value == self._indexed_values[name]: |  | ||||||
|                 existing_object = self._indexed_to_objects[value] |  | ||||||
|                 if existing_object.id == __object.id: |  | ||||||
|                     return None |  | ||||||
|  |  | ||||||
|                 break |  | ||||||
|  |  | ||||||
|         if existing_object is None: |  | ||||||
|             return None |  | ||||||
|  |  | ||||||
|         existing_object.merge(__object) |  | ||||||
|  |  | ||||||
|         # just a check if it really worked |  | ||||||
|         if existing_object.id != __object.id: |  | ||||||
|             raise ValueError("This should NEVER happen. Merging doesn't work.") |  | ||||||
|  |  | ||||||
|         self._map_element(existing_object, from_map=from_map) |  | ||||||
|  |  | ||||||
|     def contains(self, __object: T) -> bool: |  | ||||||
|         return len(self._contained_in_sub(__object)) > 0 |  | ||||||
|  |  | ||||||
|     def _find_object_in_self(self, __object: T) -> Optional[T]: |  | ||||||
|         for name, value in __object.indexing_values: |  | ||||||
|             if value == self._indexed_values[name]: |  | ||||||
|                 return self._indexed_to_objects[value] |  | ||||||
|  |  | ||||||
|     def _find_object(self, __object: T, no_sibling: bool = False) -> Tuple[Collection[T], Optional[T]]: |  | ||||||
|         other_object = self._find_object_in_self(__object) |  | ||||||
|         if other_object is not None: |  | ||||||
|             return self, other_object |  | ||||||
|  |  | ||||||
|         for c in self.children: |  | ||||||
|             o, other_object = c._find_object(__object) |  | ||||||
|             if other_object is not None: |  | ||||||
|                 return o, other_object |  | ||||||
|  |  | ||||||
|         if no_sibling: |  | ||||||
|             return self, None |  | ||||||
|  |  | ||||||
|         """ |  | ||||||
|         # find in siblings and all children of siblings |  | ||||||
|         for parent in self.parents: |  | ||||||
|             for sibling in parent.children: |  | ||||||
|                 if sibling is self: |  | ||||||
|                     continue |  | ||||||
|  |  | ||||||
|                 o, other_object = sibling._find_object(__object, no_sibling=True) |  | ||||||
|                 if other_object is not None: |  | ||||||
|                     return o, other_object |  | ||||||
|         """ |  | ||||||
|  |  | ||||||
|         return self, None |  | ||||||
|  |  | ||||||
|     def append(self, __object: Optional[T], already_is_parent: bool = False, from_map: bool = False): |     def append(self, __object: Optional[T], already_is_parent: bool = False, from_map: bool = False): | ||||||
|         """ |         """ | ||||||
| @@ -217,23 +94,32 @@ class Collection(Generic[T]): | |||||||
|         if __object is None: |         if __object is None: | ||||||
|             return |             return | ||||||
|  |  | ||||||
|         append_to, existing_object = self._find_object(__object) |         existing_object = self._find_object(__object) | ||||||
|  |  | ||||||
|         if existing_object is None: |         if existing_object is None: | ||||||
|             # append |             # append | ||||||
|             append_to._data.append(__object) |             self._data.append(__object) | ||||||
|             append_to._map_element(__object) |             self._map_element(__object) | ||||||
|  |  | ||||||
|             # only modify collections if the object actually has been appended |             for collection_attribute, child_collection in self.extend_object_to_attribute.items(): | ||||||
|             for collection_attribute, child_collection in self.contain_given_in_attribute.items(): |                 __object.__getattribute__(collection_attribute).extend(child_collection) | ||||||
|                 __object.__getattribute__(collection_attribute).contain_collection_inside(child_collection, __object) |  | ||||||
|  |  | ||||||
|             for attribute, new_object in self.append_object_to_attribute.items(): |             for attribute, new_object in self.append_object_to_attribute.items(): | ||||||
|                 __object.__getattribute__(attribute).append(new_object) |                 __object.__getattribute__(attribute).append(new_object) | ||||||
|  |  | ||||||
|             for attribute, collection in self.sync_on_append.items(): |             # only modify collections if the object actually has been appended | ||||||
|                 collection.extend(__object.__getattribute__(attribute)) |             for attribute, a in self.sync_on_append.items(): | ||||||
|                 __object.__setattr__(attribute, collection) |                 b = __object.__getattribute__(attribute) | ||||||
|  |                 object_trace(f"Syncing [{a}{id(a)}] = [{b}{id(b)}]") | ||||||
|  |  | ||||||
|  |                 data_to_extend = b.data | ||||||
|  |  | ||||||
|  |                 a._collection_for.update(b._collection_for) | ||||||
|  |                 for synced_with, key in b._collection_for.items(): | ||||||
|  |                     synced_with.__setattr__(key, a) | ||||||
|  |  | ||||||
|  |                 a.extend(data_to_extend) | ||||||
|  |  | ||||||
|  |  | ||||||
|         else: |         else: | ||||||
|             # merge only if the two objects are not the same |             # merge only if the two objects are not the same | ||||||
| @@ -245,9 +131,9 @@ class Collection(Generic[T]): | |||||||
|             existing_object.merge(__object) |             existing_object.merge(__object) | ||||||
|  |  | ||||||
|             if existing_object.id != old_id: |             if existing_object.id != old_id: | ||||||
|                 append_to._unmap_element(old_id) |                 self._unmap_element(old_id) | ||||||
|  |  | ||||||
|             append_to._map_element(existing_object) |             self._map_element(existing_object)             | ||||||
|  |  | ||||||
|     def extend(self, __iterable: Optional[Generator[T, None, None]]): |     def extend(self, __iterable: Optional[Generator[T, None, None]]): | ||||||
|         if __iterable is None: |         if __iterable is None: | ||||||
| @@ -256,54 +142,27 @@ class Collection(Generic[T]): | |||||||
|         for __object in __iterable: |         for __object in __iterable: | ||||||
|             self.append(__object) |             self.append(__object) | ||||||
|  |  | ||||||
|     def contain_collection_inside(self, sub_collection: Collection, _object: T): |  | ||||||
|         """ |  | ||||||
|         This collection will ALWAYS contain everything from the passed in collection |  | ||||||
|         """ |  | ||||||
|         if self is sub_collection or sub_collection in self.children: |  | ||||||
|             return |  | ||||||
|  |  | ||||||
|         _object._inner._is_collection_child[self] = sub_collection |  | ||||||
|         _object._inner._is_collection_parent[sub_collection] = self |  | ||||||
|  |  | ||||||
|         self.children.append(sub_collection) |  | ||||||
|         sub_collection.parents.append(self) |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def data(self) -> List[T]: |     def data(self) -> List[T]: | ||||||
|         return list(self.__iter__()) |         return list(self.__iter__()) | ||||||
|  |  | ||||||
|     def __len__(self) -> int: |     def __len__(self) -> int: | ||||||
|         return len(self._data) + sum(len(collection) for collection in self.children) |         return len(self._data) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def empty(self) -> bool: |     def empty(self) -> bool: | ||||||
|         return self.__len__() <= 0 |         return self.__len__() <= 0 | ||||||
|  |  | ||||||
|     def __iter__(self, finished_ids: set = None) -> Iterator[T]: |     def __iter__(self) -> Iterator[T]: | ||||||
|         _finished_ids = finished_ids or set() |         yield from self._data | ||||||
|  |  | ||||||
|         for element in self._data: |  | ||||||
|             if element.id in _finished_ids: |  | ||||||
|                 continue |  | ||||||
|             _finished_ids.add(element.id) |  | ||||||
|             yield element |  | ||||||
|  |  | ||||||
|         for c in self.children: |  | ||||||
|             yield from c.__iter__(finished_ids=finished_ids) |  | ||||||
|  |  | ||||||
|     def __merge__(self, __other: Collection, override: bool = False): |     def __merge__(self, __other: Collection, override: bool = False): | ||||||
|         self.extend(__other) |         self.extend(__other) | ||||||
|  |  | ||||||
|     def __getitem__(self, item: int): |     def __getitem__(self, item: int): | ||||||
|         if item < len(self._data): |  | ||||||
|         return self._data[item] |         return self._data[item] | ||||||
|  |  | ||||||
|         item = item - len(self._data) |     def get(self, item: int, default = None): | ||||||
|  |         if item >= len(self._data): | ||||||
|         for c in self.children: |             return default | ||||||
|             if item < len(c): |         return self._data[item] | ||||||
|                 return c.__getitem__(item) |  | ||||||
|             item = item - len(c._data) |  | ||||||
|  |  | ||||||
|         raise IndexError |  | ||||||
|   | |||||||
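The rewritten Collection replaces the parent/child tree with two flat dictionaries: `_indexed_values[name][value]` for lookups and `_indexed_from_id[id]` so an object can be unmapped cleanly before being re-indexed. A stripped-down illustration of that bookkeeping (not the class above):

```python
from collections import defaultdict
from typing import Any, Dict, Iterable, Optional, Tuple


class TinyIndex:
    def __init__(self) -> None:
        self.indexed_values: Dict[str, Dict[Any, Any]] = defaultdict(dict)   # name -> value -> obj
        self.indexed_from_id: Dict[int, Dict[str, Any]] = defaultdict(dict)  # id -> name -> value

    def map(self, obj_id: int, obj: Any, pairs: Iterable[Tuple[str, Any]]) -> None:
        self.unmap(obj_id)  # re-mapping always starts from a clean slate
        for name, value in pairs:
            self.indexed_values[name][value] = obj
            self.indexed_from_id[obj_id][name] = value

    def unmap(self, obj_id: int) -> None:
        for name, value in self.indexed_from_id.pop(obj_id, {}).items():
            self.indexed_values[name].pop(value, None)

    def find(self, pairs: Iterable[Tuple[str, Any]]) -> Optional[Any]:
        for name, value in pairs:
            if value in self.indexed_values[name]:
                return self.indexed_values[name][value]
        return None


index = TinyIndex()
index.map(1, "song-object", [("title", "paradise"), ("isrc", "XY123")])
print(index.find([("isrc", "XY123")]))  # song-object
```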
| @@ -1,5 +1,10 @@ | |||||||
| import mistune | import mistune | ||||||
| import html2markdown | from markdownify import markdownify as md | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def plain_to_markdown(plain: str) -> str: | ||||||
|  |     return plain.replace("\n", "  \n") | ||||||
|  |  | ||||||
|  |  | ||||||
| class FormattedText:     | class FormattedText:     | ||||||
|     html = "" |     html = "" | ||||||
| @@ -7,12 +12,15 @@ class FormattedText: | |||||||
|     def __init__( |     def __init__( | ||||||
|             self, |             self, | ||||||
|             markdown: str = None, |             markdown: str = None, | ||||||
|             html: str = None |             html: str = None, | ||||||
|  |             plain: str = None, | ||||||
|     ) -> None: |     ) -> None: | ||||||
|         if html is not None: |         if html is not None: | ||||||
|             self.html = html |             self.html = html | ||||||
|         elif markdown is not None: |         elif markdown is not None: | ||||||
|             self.html = mistune.markdown(markdown) |             self.html = mistune.markdown(markdown) | ||||||
|  |         elif plain is not None: | ||||||
|  |             self.html = mistune.markdown(plain_to_markdown(plain)) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def is_empty(self) -> bool: |     def is_empty(self) -> bool: | ||||||
| @@ -28,7 +36,7 @@ class FormattedText: | |||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def markdown(self) -> str: |     def markdown(self) -> str: | ||||||
|         return html2markdown.convert(self.html) |         return md(self.html).strip() | ||||||
|  |  | ||||||
|     def __str__(self) -> str: |     def __str__(self) -> str: | ||||||
|         return self.markdown |         return self.markdown | ||||||
|   | |||||||
| @@ -34,6 +34,6 @@ class Lyrics(OuterProxy): | |||||||
|     @property |     @property | ||||||
|     def metadata(self) -> Metadata: |     def metadata(self) -> Metadata: | ||||||
|         return Metadata({ |         return Metadata({ | ||||||
|             id3Mapping.UNSYNCED_LYRICS: [self.text.html] |             id3Mapping.UNSYNCED_LYRICS: [self.text.markdown] | ||||||
|         }) |         }) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -32,10 +32,7 @@ class InnerData: | |||||||
|  |  | ||||||
|     def __init__(self, object_type, **kwargs): |     def __init__(self, object_type, **kwargs): | ||||||
|         self._refers_to_instances = set() |         self._refers_to_instances = set() | ||||||
|  |         self._fetched_from: dict = {} | ||||||
|         # collection : collection that is a collection of self |  | ||||||
|         self._is_collection_child: Dict[Collection, Collection] = {} |  | ||||||
|         self._is_collection_parent: Dict[Collection, Collection] = {} |  | ||||||
|  |  | ||||||
|         # initialize the default values |         # initialize the default values | ||||||
|         self._default_values = {} |         self._default_values = {} | ||||||
| @@ -43,8 +40,13 @@ class InnerData: | |||||||
|             self._default_values[name] = factory() |             self._default_values[name] = factory() | ||||||
|  |  | ||||||
|         for key, value in kwargs.items(): |         for key, value in kwargs.items(): | ||||||
|  |             if hasattr(value, "__is_collection__"): | ||||||
|  |                 value._collection_for[self] = key | ||||||
|             self.__setattr__(key, value) |             self.__setattr__(key, value) | ||||||
|  |  | ||||||
|  |     def __hash__(self): | ||||||
|  |         return self.id | ||||||
|  |  | ||||||
|     def __merge__(self, __other: InnerData, override: bool = False): |     def __merge__(self, __other: InnerData, override: bool = False): | ||||||
|         """ |         """ | ||||||
|         :param __other: |         :param __other: | ||||||
| @@ -52,6 +54,8 @@ class InnerData: | |||||||
|         :return: |         :return: | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|  |         self._fetched_from.update(__other._fetched_from) | ||||||
|  |  | ||||||
|         for key, value in __other.__dict__.copy().items(): |         for key, value in __other.__dict__.copy().items(): | ||||||
|             # just set the other value if self doesn't already have it |             # just set the other value if self doesn't already have it | ||||||
|             if key not in self.__dict__ or (key in self.__dict__ and self.__dict__[key] == self._default_values.get(key)): |             if key not in self.__dict__ or (key in self.__dict__ and self.__dict__[key] == self._default_values.get(key)): | ||||||
| @@ -85,7 +89,7 @@ class OuterProxy: | |||||||
|     def __init__(self, _id: int = None, dynamic: bool = False, **kwargs): |     def __init__(self, _id: int = None, dynamic: bool = False, **kwargs): | ||||||
|         _automatic_id: bool = False |         _automatic_id: bool = False | ||||||
|  |  | ||||||
|         if _id is None and not dynamic: |         if _id is None: | ||||||
|             """ |             """ | ||||||
|             generates a random integer id |             generates a random integer id | ||||||
|             the range is defined in the config |             the range is defined in the config | ||||||
| @@ -109,11 +113,11 @@ class OuterProxy: | |||||||
|  |  | ||||||
|                 del kwargs[name] |                 del kwargs[name] | ||||||
|  |  | ||||||
|         self._fetched_from: dict = {} |  | ||||||
|         self._inner: InnerData = InnerData(type(self), **kwargs) |         self._inner: InnerData = InnerData(type(self), **kwargs) | ||||||
|         self._inner._refers_to_instances.add(self) |         self._inner._refers_to_instances.add(self) | ||||||
|  |  | ||||||
|         object_trace(f"creating {type(self).__name__} [{self.title_string}]") |         object_trace(f"creating {type(self).__name__} [{self.title_string}]") | ||||||
|  |  | ||||||
|         self.__init_collections__() |         self.__init_collections__() | ||||||
|  |  | ||||||
|         for name, data_list in collection_data.items(): |         for name, data_list in collection_data.items(): | ||||||
| @@ -192,19 +196,7 @@ class OuterProxy: | |||||||
|         if len(b._inner._refers_to_instances) > len(a._inner._refers_to_instances): |         if len(b._inner._refers_to_instances) > len(a._inner._refers_to_instances): | ||||||
|             a, b = b, a |             a, b = b, a | ||||||
|  |  | ||||||
|         object_trace(f"merging {type(a).__name__} [{a.title_string} | {a.id}] with {type(b).__name__} [{b.title_string} | {b.id}] called by [{' | '.join(f'{s.function} {Path(s.filename).name}:{str(s.lineno)}' for s in inspect.stack()[1:5])}]") |         object_trace(f"merging {type(a).__name__} [{a.title_string} | {a.id}] with {type(b).__name__} [{b.title_string} | {b.id}]") | ||||||
|          |  | ||||||
|         for collection, child_collection in b._inner._is_collection_child.items(): |  | ||||||
|             try: |  | ||||||
|                 collection.children.remove(child_collection) |  | ||||||
|             except ValueError: |  | ||||||
|                 pass |  | ||||||
|              |  | ||||||
|         for collection, parent_collection in b._inner._is_collection_parent.items(): |  | ||||||
|             try: |  | ||||||
|                 collection.parents.remove(parent_collection) |  | ||||||
|             except ValueError: |  | ||||||
|                 pass |  | ||||||
|  |  | ||||||
|         old_inner = b._inner |         old_inner = b._inner | ||||||
|  |  | ||||||
| @@ -220,13 +212,13 @@ class OuterProxy: | |||||||
|  |  | ||||||
|     def mark_as_fetched(self, *url_hash_list: List[str]): |     def mark_as_fetched(self, *url_hash_list: List[str]): | ||||||
|         for url_hash in url_hash_list: |         for url_hash in url_hash_list: | ||||||
|             self._fetched_from[url_hash] = { |             self._inner._fetched_from[url_hash] = { | ||||||
|                 "time": get_unix_time(), |                 "time": get_unix_time(), | ||||||
|                 "url": url_hash, |                 "url": url_hash, | ||||||
|             } |             } | ||||||
|  |  | ||||||
|     def already_fetched_from(self, url_hash: str) -> bool: |     def already_fetched_from(self, url_hash: str) -> bool: | ||||||
|         res = self._fetched_from.get(url_hash, None) |         res = self._inner._fetched_from.get(url_hash, None) | ||||||
|  |  | ||||||
|         if res is None: |         if res is None: | ||||||
|             return False |             return False | ||||||
| @@ -275,9 +267,9 @@ class OuterProxy: | |||||||
|  |  | ||||||
|         return r |         return r | ||||||
|  |  | ||||||
|     def __repr__(self): |  | ||||||
|         return f"{type(self).__name__}({', '.join(key + ': ' + str(val) for key, val in self.indexing_values)})" |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def title_string(self) -> str: |     def title_string(self) -> str: | ||||||
|         return str(self.__getattribute__(self.TITEL)) |         return str(self.__getattribute__(self.TITEL)) | ||||||
|  |  | ||||||
|  |     def __repr__(self): | ||||||
|  |         return f"{type(self).__name__}({self.title_string})" | ||||||
|   | |||||||
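Moving `_fetched_from` onto InnerData means the fetch bookkeeping survives merges: every OuterProxy that ends up sharing an inner object also shares the record of which source URLs were already fetched. A reduced illustration of that idea (not the real classes):

```python
from time import time


class Inner:
    def __init__(self) -> None:
        self.fetched_from = {}


class Proxy:
    def __init__(self) -> None:
        self.inner = Inner()

    def merge(self, other: "Proxy") -> None:
        # simplified core of a merge: both proxies end up sharing one inner object
        self.inner.fetched_from.update(other.inner.fetched_from)
        other.inner = self.inner

    def mark_as_fetched(self, url_hash: str) -> None:
        self.inner.fetched_from[url_hash] = {"time": time(), "url": url_hash}

    def already_fetched_from(self, url_hash: str) -> bool:
        return url_hash in self.inner.fetched_from


a, b = Proxy(), Proxy()
a.mark_as_fetched("bandcamp:album:123")
b.merge(a)
print(b.already_fetched_from("bandcamp:album:123"))  # True
print(a.inner is b.inner)                            # True
```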
| @@ -27,14 +27,49 @@ from ..utils.string_processing import unify | |||||||
| from .parents import OuterProxy as Base | from .parents import OuterProxy as Base | ||||||
|  |  | ||||||
| from ..utils.config import main_settings | from ..utils.config import main_settings | ||||||
|  | from ..utils.enums.colors import BColors | ||||||
|  |  | ||||||
| """ | """ | ||||||
| All Objects dependent  | All Objects dependent  | ||||||
| """ | """ | ||||||
|  |  | ||||||
| CountryTyping = type(list(pycountry.countries)[0]) | CountryTyping = type(list(pycountry.countries)[0]) | ||||||
| OPTION_STRING_DELIMITER = " | " |  | ||||||
|  |  | ||||||
|  | OPTION_BACKGROUND = BColors.GREY | ||||||
|  | OPTION_FOREGROUND = BColors.OKBLUE | ||||||
|  |  | ||||||
|  | def get_collection_string( | ||||||
|  |     collection: Collection[Base],  | ||||||
|  |     template: str,  | ||||||
|  |     ignore_titles: Set[str] = None, | ||||||
|  |     background: BColors = OPTION_BACKGROUND,  | ||||||
|  |     foreground: BColors = OPTION_FOREGROUND | ||||||
|  | ) -> str: | ||||||
|  |     if collection.empty: | ||||||
|  |         return "" | ||||||
|  |  | ||||||
|  |     foreground = foreground.value | ||||||
|  |     background = background.value | ||||||
|  |  | ||||||
|  |     ignore_titles = ignore_titles or set() | ||||||
|  |  | ||||||
|  |     r = background | ||||||
|  |  | ||||||
|  |     element: Base | ||||||
|  |     titel_list: List[str] = [element.title_string.strip() for element in collection if element.title_string not in ignore_titles] | ||||||
|  |  | ||||||
|  |     for i, titel in enumerate(titel_list): | ||||||
|  |         delimiter = ", " | ||||||
|  |         if i == len(collection) - 1: | ||||||
|  |             delimiter = "" | ||||||
|  |         elif i == len(collection) - 2: | ||||||
|  |             delimiter = " and " | ||||||
|  |  | ||||||
|  |         r += foreground + titel + BColors.ENDC.value + background + delimiter + BColors.ENDC.value | ||||||
|  |  | ||||||
|  |     r += BColors.ENDC.value | ||||||
|  |  | ||||||
|  |     return template.format(r) | ||||||
|  |  | ||||||
| class Song(Base): | class Song(Base): | ||||||
|     title: str |     title: str | ||||||
| @@ -86,11 +121,6 @@ class Song(Base): | |||||||
|     TITEL = "title" |     TITEL = "title" | ||||||
|  |  | ||||||
|     def __init_collections__(self) -> None: |     def __init_collections__(self) -> None: | ||||||
|         """ |  | ||||||
|         self.album_collection.contain_given_in_attribute = { |  | ||||||
|             "artist_collection": self.main_artist_collection, |  | ||||||
|         } |  | ||||||
|         """ |  | ||||||
|         self.album_collection.sync_on_append = { |         self.album_collection.sync_on_append = { | ||||||
|             "artist_collection": self.main_artist_collection, |             "artist_collection": self.main_artist_collection, | ||||||
|         } |         } | ||||||
| @@ -98,8 +128,7 @@ class Song(Base): | |||||||
|         self.album_collection.append_object_to_attribute = { |         self.album_collection.append_object_to_attribute = { | ||||||
|             "song_collection": self, |             "song_collection": self, | ||||||
|         } |         } | ||||||
|  |         self.main_artist_collection.extend_object_to_attribute = { | ||||||
|         self.main_artist_collection.contain_given_in_attribute = { |  | ||||||
|             "main_album_collection": self.album_collection |             "main_album_collection": self.album_collection | ||||||
|         } |         } | ||||||
|         self.feature_artist_collection.append_object_to_attribute = { |         self.feature_artist_collection.append_object_to_attribute = { | ||||||
| @@ -126,7 +155,7 @@ class Song(Base): | |||||||
|     def indexing_values(self) -> List[Tuple[str, object]]: |     def indexing_values(self) -> List[Tuple[str, object]]: | ||||||
|         return [ |         return [ | ||||||
|             ('id', self.id), |             ('id', self.id), | ||||||
|             ('title', unify(self.unified_title)), |             ('title', unify(self.title)), | ||||||
|             ('isrc', self.isrc), |             ('isrc', self.isrc), | ||||||
|             *[('url', source.url) for source in self.source_collection] |             *[('url', source.url) for source in self.source_collection] | ||||||
|         ] |         ] | ||||||
| @@ -158,18 +187,12 @@ class Song(Base): | |||||||
|             return main_artists |             return main_artists | ||||||
|         return f"{main_artists} feat. {feature_artists}" |         return f"{main_artists} feat. {feature_artists}" | ||||||
|  |  | ||||||
|     def __repr__(self) -> str: |  | ||||||
|         return f"Song(\"{self.title}\")" |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def option_string(self) -> str: |     def option_string(self) -> str: | ||||||
|         r = f"{self.__repr__()}" |         r = OPTION_FOREGROUND.value + self.title + BColors.ENDC.value + OPTION_BACKGROUND.value | ||||||
|         if not self.album_collection.empty: |         r += get_collection_string(self.album_collection, " from {}", ignore_titles={self.title}) | ||||||
|             r += f" from Album({OPTION_STRING_DELIMITER.join(album.title for album in self.album_collection)})"  |         r += get_collection_string(self.main_artist_collection, " by {}") | ||||||
|         if not self.main_artist_collection.empty: |         r += get_collection_string(self.feature_artist_collection, " feat. {}") | ||||||
|             r += f" by Artist({OPTION_STRING_DELIMITER.join(artist.name for artist in self.main_artist_collection)})"  |  | ||||||
|         if not self.feature_artist_collection.empty: |  | ||||||
|             r += f" feat. Artist({OPTION_STRING_DELIMITER.join(artist.name for artist in self.feature_artist_collection)})" |  | ||||||
|         return r |         return r | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
| @@ -209,6 +232,7 @@ class Album(Base): | |||||||
|     notes: FormattedText |     notes: FormattedText | ||||||
|  |  | ||||||
|     source_collection: SourceCollection |     source_collection: SourceCollection | ||||||
|  |  | ||||||
|     artist_collection: Collection[Artist] |     artist_collection: Collection[Artist] | ||||||
|     song_collection: Collection[Song] |     song_collection: Collection[Song] | ||||||
|     label_collection: Collection[Label] |     label_collection: Collection[Label] | ||||||
| @@ -258,7 +282,7 @@ class Album(Base): | |||||||
|         self.artist_collection.append_object_to_attribute = { |         self.artist_collection.append_object_to_attribute = { | ||||||
|             "main_album_collection": self |             "main_album_collection": self | ||||||
|         } |         } | ||||||
|         self.artist_collection.contain_given_in_attribute = { |         self.artist_collection.extend_object_to_attribute = { | ||||||
|             "label_collection": self.label_collection |             "label_collection": self.label_collection | ||||||
|         } |         } | ||||||
|  |  | ||||||
| @@ -307,14 +331,15 @@ class Album(Base): | |||||||
|             id3Mapping.ALBUMSORTORDER: [str(self.albumsort)] if self.albumsort is not None else [] |             id3Mapping.ALBUMSORTORDER: [str(self.albumsort)] if self.albumsort is not None else [] | ||||||
|         }) |         }) | ||||||
|  |  | ||||||
|     def __repr__(self): |  | ||||||
|         return f"Album(\"{self.title}\")" |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def option_string(self) -> str: |     def option_string(self) -> str: | ||||||
|         return f"{self.__repr__()} " \ |         r = OPTION_FOREGROUND.value + self.title + BColors.ENDC.value + OPTION_BACKGROUND.value | ||||||
|                f"by Artist({OPTION_STRING_DELIMITER.join([artist.name + str(artist.id) for artist in self.artist_collection])}) " \ |         r += get_collection_string(self.artist_collection, " by {}") | ||||||
|                f"under Label({OPTION_STRING_DELIMITER.join([label.name for label in self.label_collection])})" |         r += get_collection_string(self.label_collection, " under {}") | ||||||
|  |  | ||||||
|  |         if len(self.song_collection) > 0: | ||||||
|  |             r += f" with {len(self.song_collection)} songs" | ||||||
|  |         return r | ||||||
|          |          | ||||||
|     @property |     @property | ||||||
|     def options(self) -> List[P]: |     def options(self) -> List[P]: | ||||||
| @@ -347,7 +372,6 @@ class Album(Base): | |||||||
|                 tracksort_map[i] = existing_list.pop(0) |                 tracksort_map[i] = existing_list.pop(0) | ||||||
|                 tracksort_map[i].tracksort = i |                 tracksort_map[i].tracksort = i | ||||||
|  |  | ||||||
|  |  | ||||||
|     def compile(self, merge_into: bool = False): |     def compile(self, merge_into: bool = False): | ||||||
|         """ |         """ | ||||||
|         compiles the recursive structures, |         compiles the recursive structures, | ||||||
| @@ -576,8 +600,18 @@ class Artist(Base): | |||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def option_string(self) -> str: |     def option_string(self) -> str: | ||||||
|         return f"{self.__repr__()} " \ |         r = OPTION_FOREGROUND.value + self.name + BColors.ENDC.value + OPTION_BACKGROUND.value | ||||||
|                f"under Label({OPTION_STRING_DELIMITER.join([label.name for label in self.label_collection])})" |         r += get_collection_string(self.label_collection, " under {}") | ||||||
|  |          | ||||||
|  |         r += OPTION_BACKGROUND.value | ||||||
|  |         if len(self.main_album_collection) > 0: | ||||||
|  |             r += f" with {len(self.main_album_collection)} albums" | ||||||
|  |          | ||||||
|  |         if len(self.feature_song_collection) > 0: | ||||||
|  |             r += f" featured in {len(self.feature_song_collection)} songs" | ||||||
|  |         r += BColors.ENDC.value | ||||||
|  |  | ||||||
|  |         return r | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def options(self) -> List[P]: |     def options(self) -> List[P]: | ||||||
| @@ -695,4 +729,4 @@ class Label(Base): | |||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def option_string(self): |     def option_string(self): | ||||||
|         return self.__repr__() |         return OPTION_FOREGROUND.value + self.name + BColors.ENDC.value | ||||||
|   | |||||||
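`get_collection_string` above joins the collection's titles with commas and an " and " before the last element, wrapping each title in the foreground colour. The same delimiter logic in isolation, with the colours stripped for clarity:

```python
from typing import List


def join_titles(titles: List[str]) -> str:
    r = ""
    for i, title in enumerate(titles):
        delimiter = ", "
        if i == len(titles) - 1:
            delimiter = ""       # nothing after the last title
        elif i == len(titles) - 2:
            delimiter = " and "  # natural-language join before the last title
        r += title + delimiter
    return r


print(join_titles(["Artist A", "Artist B"]))              # Artist A and Artist B
print(join_titles(["Artist A", "Artist B", "Artist C"]))  # Artist A, Artist B and Artist C
```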
| @@ -451,7 +451,7 @@ class Page: | |||||||
|         source = sources[0] |         source = sources[0] | ||||||
|  |  | ||||||
|         if not found_on_disc: |         if not found_on_disc: | ||||||
|             r = self.download_song_to_target(source=source, target=temp_target, desc=song.title) |             r = self.download_song_to_target(source=source, target=temp_target, desc=song.option_string) | ||||||
|  |  | ||||||
|         if not r.is_fatal_error: |         if not r.is_fatal_error: | ||||||
|             r.merge(self._post_process_targets(song, temp_target, |             r.merge(self._post_process_targets(song, temp_target, | ||||||
|   | |||||||
| @@ -18,10 +18,12 @@ from ..objects import ( | |||||||
|     Contact, |     Contact, | ||||||
|     ID3Timestamp, |     ID3Timestamp, | ||||||
|     Lyrics, |     Lyrics, | ||||||
|     FormattedText |     FormattedText, | ||||||
|  |     Artwork, | ||||||
| ) | ) | ||||||
| from ..connection import Connection | from ..connection import Connection | ||||||
| from ..utils.support_classes.download_result import DownloadResult | from ..utils.support_classes.download_result import DownloadResult | ||||||
|  | from ..utils.string_processing import clean_song_title | ||||||
| from ..utils.config import main_settings, logging_settings | from ..utils.config import main_settings, logging_settings | ||||||
| from ..utils.shared import DEBUG | from ..utils.shared import DEBUG | ||||||
|  |  | ||||||
| @@ -114,7 +116,7 @@ class Bandcamp(Page): | |||||||
|  |  | ||||||
|         if object_type is BandcampTypes.SONG: |         if object_type is BandcampTypes.SONG: | ||||||
|             return Song( |             return Song( | ||||||
|                 title=name.strip(), |                 title=clean_song_title(name, artist_name=data["band_name"]), | ||||||
|                 source_list=source_list, |                 source_list=source_list, | ||||||
|                 main_artist_list=[ |                 main_artist_list=[ | ||||||
|                     Artist( |                     Artist( | ||||||
| @@ -252,11 +254,18 @@ class Bandcamp(Page): | |||||||
|         artist.source_collection.append(source) |         artist.source_collection.append(source) | ||||||
|         return artist |         return artist | ||||||
|  |  | ||||||
|     def _parse_track_element(self, track: dict) -> Optional[Song]: |     def _parse_track_element(self, track: dict, artwork: Artwork) -> Optional[Song]: | ||||||
|  |         lyrics_list: List[Lyrics] = [] | ||||||
|  |  | ||||||
|  |         _lyrics: Optional[str] = track.get("item", {}).get("recordingOf", {}).get("lyrics", {}).get("text") | ||||||
|  |         if _lyrics is not None: | ||||||
|  |             lyrics_list.append(Lyrics(text=FormattedText(plain=_lyrics))) | ||||||
|  |  | ||||||
|         return Song( |         return Song( | ||||||
|             title=track["item"]["name"].strip(), |             title=clean_song_title(track["item"]["name"]), | ||||||
|             source_list=[Source(self.SOURCE_TYPE, track["item"]["mainEntityOfPage"])], |             source_list=[Source(self.SOURCE_TYPE, track["item"]["mainEntityOfPage"])], | ||||||
|             tracksort=int(track["position"]) |             tracksort=int(track["position"]), | ||||||
|  |             artwork=artwork, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def fetch_album(self, source: Source, stop_at_level: int = 1) -> Album: |     def fetch_album(self, source: Source, stop_at_level: int = 1) -> Album: | ||||||
| @@ -289,12 +298,32 @@ class Bandcamp(Page): | |||||||
|             )] |             )] | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |         artwork: Artwork = Artwork() | ||||||
|  |  | ||||||
|  |         def _get_artwork_url(_data: dict) -> Optional[str]: | ||||||
|  |             if "image" in _data: | ||||||
|  |                 return _data["image"] | ||||||
|  |             for _property in _data.get("additionalProperty", []): | ||||||
|  |                 if _property.get("name") == "art_id": | ||||||
|  |                     return f"https://f4.bcbits.com/img/a{_property.get('value')}_2.jpg" | ||||||
|  |  | ||||||
|  |         _artwork_url = _get_artwork_url(data) | ||||||
|  |         if _artwork_url is not None: | ||||||
|  |             artwork.append(url=_artwork_url, width=350, height=350) | ||||||
|  |         else: | ||||||
|  |             for album_release in data.get("albumRelease", []): | ||||||
|  |                 _artwork_url = _get_artwork_url(album_release) | ||||||
|  |                 if _artwork_url is not None: | ||||||
|  |                     artwork.append(url=_artwork_url, width=350, height=350) | ||||||
|  |                     break | ||||||
|  |  | ||||||
|  |  | ||||||
|         for i, track_json in enumerate(data.get("track", {}).get("itemListElement", [])): |         for i, track_json in enumerate(data.get("track", {}).get("itemListElement", [])): | ||||||
|             if DEBUG: |             if DEBUG: | ||||||
|                 dump_to_file(f"album_track_{i}.json", json.dumps(track_json), is_json=True, exit_after_dump=False) |                 dump_to_file(f"album_track_{i}.json", json.dumps(track_json), is_json=True, exit_after_dump=False) | ||||||
|  |  | ||||||
|             try: |             try: | ||||||
|                 album.song_collection.append(self._parse_track_element(track_json)) |                 album.song_collection.append(self._parse_track_element(track_json, artwork=artwork)) | ||||||
|             except KeyError: |             except KeyError: | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
| @@ -304,7 +333,6 @@ class Bandcamp(Page): | |||||||
|     def _fetch_lyrics(self, soup: BeautifulSoup) -> List[Lyrics]: |     def _fetch_lyrics(self, soup: BeautifulSoup) -> List[Lyrics]: | ||||||
|         track_lyrics = soup.find("div", {"class": "lyricsText"}) |         track_lyrics = soup.find("div", {"class": "lyricsText"}) | ||||||
|         if track_lyrics: |         if track_lyrics: | ||||||
|             self.LOGGER.debug(" Lyrics retrieved..") |  | ||||||
|             return [Lyrics(text=FormattedText(html=track_lyrics.prettify()))] |             return [Lyrics(text=FormattedText(html=track_lyrics.prettify()))] | ||||||
|  |  | ||||||
|         return [] |         return [] | ||||||
| @@ -323,7 +351,6 @@ class Bandcamp(Page): | |||||||
|         if len(other_data_list) > 0: |         if len(other_data_list) > 0: | ||||||
|             other_data = json.loads(other_data_list[0]["data-tralbum"]) |             other_data = json.loads(other_data_list[0]["data-tralbum"]) | ||||||
|  |  | ||||||
|         if DEBUG: |  | ||||||
|         dump_to_file("bandcamp_song_data.json", data_container.text, is_json=True, exit_after_dump=False) |         dump_to_file("bandcamp_song_data.json", data_container.text, is_json=True, exit_after_dump=False) | ||||||
|         dump_to_file("bandcamp_song_data_other.json", json.dumps(other_data), is_json=True, exit_after_dump=False) |         dump_to_file("bandcamp_song_data_other.json", json.dumps(other_data), is_json=True, exit_after_dump=False) | ||||||
|         dump_to_file("bandcamp_song_page.html", r.text, exit_after_dump=False) |         dump_to_file("bandcamp_song_page.html", r.text, exit_after_dump=False) | ||||||
| @@ -337,8 +364,8 @@ class Bandcamp(Page): | |||||||
|             mp3_url = value |             mp3_url = value | ||||||
|  |  | ||||||
|         song = Song( |         song = Song( | ||||||
|             title=data["name"].strip(), |             title=clean_song_title(data["name"], artist_name=artist_data["name"]), | ||||||
|             source_list=[Source(self.SOURCE_TYPE, data.get("mainEntityOfPage", data["@id"]), audio_url=mp3_url)], |             source_list=[source, Source(self.SOURCE_TYPE, data.get("mainEntityOfPage", data["@id"]), audio_url=mp3_url)], | ||||||
|             album_list=[Album( |             album_list=[Album( | ||||||
|                 title=album_data["name"].strip(), |                 title=album_data["name"].strip(), | ||||||
|                 date=ID3Timestamp.strptime(data["datePublished"], "%d %b %Y %H:%M:%S %Z"), |                 date=ID3Timestamp.strptime(data["datePublished"], "%d %b %Y %H:%M:%S %Z"), | ||||||
| @@ -351,8 +378,6 @@ class Bandcamp(Page): | |||||||
|             lyrics_list=self._fetch_lyrics(soup=soup) |             lyrics_list=self._fetch_lyrics(soup=soup) | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         song.source_collection.append(source) |  | ||||||
|  |  | ||||||
|         return song |         return song | ||||||
|  |  | ||||||
|     def download_song_to_target(self, source: Source, target: Target, desc: str = None) -> DownloadResult: |     def download_song_to_target(self, source: Source, target: Target, desc: str = None) -> DownloadResult: | ||||||
|   | |||||||
| @@ -26,6 +26,7 @@ from ..objects import ( | |||||||
| ) | ) | ||||||
| from ..utils.config import logging_settings | from ..utils.config import logging_settings | ||||||
| from ..utils import string_processing, shared | from ..utils import string_processing, shared | ||||||
|  | from ..utils.string_processing import clean_song_title | ||||||
| from ..utils.support_classes.query import Query | from ..utils.support_classes.query import Query | ||||||
| from ..utils.support_classes.download_result import DownloadResult | from ..utils.support_classes.download_result import DownloadResult | ||||||
|  |  | ||||||
| @@ -120,6 +121,7 @@ class Musify(Page): | |||||||
|         self.connection: Connection = Connection( |         self.connection: Connection = Connection( | ||||||
|             host="https://musify.club/", |             host="https://musify.club/", | ||||||
|             logger=self.LOGGER, |             logger=self.LOGGER, | ||||||
|  |             module="musify", | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         self.stream_connection: Connection = Connection( |         self.stream_connection: Connection = Connection( | ||||||
| @@ -355,8 +357,10 @@ class Musify(Page): | |||||||
|             if raw_id.isdigit(): |             if raw_id.isdigit(): | ||||||
|                 _id = raw_id |                 _id = raw_id | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|         return Song( |         return Song( | ||||||
|             title=song_title, |             title=clean_song_title(song_title, artist_name=artist_list[0].name if len(artist_list) > 0 else None), | ||||||
|             main_artist_list=artist_list, |             main_artist_list=artist_list, | ||||||
|             source_list=source_list |             source_list=source_list | ||||||
|         ) |         ) | ||||||
| @@ -390,10 +394,11 @@ class Musify(Page): | |||||||
|         return search_results |         return search_results | ||||||
|      |      | ||||||
|     def fetch_song(self, source: Source, stop_at_level: int = 1) -> Song: |     def fetch_song(self, source: Source, stop_at_level: int = 1) -> Song: | ||||||
|         # https://musify.club/track/linkin-park-numb-210765 |         musify_url = parse_url(source.url) | ||||||
|         r = self.connection.get(source.url) |  | ||||||
|  |         r = self.connection.get(source.url, name="track_" + musify_url.name_with_id) | ||||||
|         if r is None: |         if r is None: | ||||||
|             return Song |             return Song() | ||||||
|          |          | ||||||
|         soup = self.get_soup_from_response(r) |         soup = self.get_soup_from_response(r) | ||||||
|          |          | ||||||
| @@ -502,7 +507,7 @@ class Musify(Page): | |||||||
|                 )) |                 )) | ||||||
|          |          | ||||||
|         return Song( |         return Song( | ||||||
|             title=track_name, |             title=clean_song_title(track_name, artist_name=artist_list[0].name if len(artist_list) > 0 else None), | ||||||
|             source_list=source_list, |             source_list=source_list, | ||||||
|             lyrics_list=lyrics_list, |             lyrics_list=lyrics_list, | ||||||
|             main_artist_list=artist_list, |             main_artist_list=artist_list, | ||||||
| @@ -645,7 +650,7 @@ class Musify(Page): | |||||||
|                     )) |                     )) | ||||||
|  |  | ||||||
|         return Song( |         return Song( | ||||||
|             title=song_name, |             title=clean_song_title(song_name, artist_name=artist_list[0].name if len(artist_list) > 0 else None), | ||||||
|             tracksort=tracksort, |             tracksort=tracksort, | ||||||
|             main_artist_list=artist_list, |             main_artist_list=artist_list, | ||||||
|             source_list=source_list |             source_list=source_list | ||||||
| @@ -669,7 +674,7 @@ class Musify(Page): | |||||||
|         url = parse_url(source.url) |         url = parse_url(source.url) | ||||||
|  |  | ||||||
|         endpoint = self.HOST + "/release/" + url.name_with_id |         endpoint = self.HOST + "/release/" + url.name_with_id | ||||||
|         r = self.connection.get(endpoint) |         r = self.connection.get(endpoint, name=url.name_with_id) | ||||||
|         if r is None: |         if r is None: | ||||||
|             return Album() |             return Album() | ||||||
|  |  | ||||||
| @@ -706,7 +711,7 @@ class Musify(Page): | |||||||
|         :return: |         :return: | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         r = self.connection.get(f"https://musify.club/{url.source_type.value}/{url.name_with_id}?_pjax=#bodyContent") |         r = self.connection.get(f"https://musify.club/{url.source_type.value}/{url.name_with_id}?_pjax=#bodyContent", name="artist_attributes_" + url.name_with_id) | ||||||
|         if r is None: |         if r is None: | ||||||
|             return Artist() |             return Artist() | ||||||
|  |  | ||||||
| @@ -1072,7 +1077,7 @@ class Musify(Page): | |||||||
|             "SortOrder.Property": "dateCreated", |             "SortOrder.Property": "dateCreated", | ||||||
|             "SortOrder.IsAscending": False, |             "SortOrder.IsAscending": False, | ||||||
|             "X-Requested-With": "XMLHttpRequest" |             "X-Requested-With": "XMLHttpRequest" | ||||||
|         }) |         }, name="discography_" + url.name_with_id) | ||||||
|         if r is None: |         if r is None: | ||||||
|             return [] |             return [] | ||||||
|         soup: BeautifulSoup = BeautifulSoup(r.content, features="html.parser") |         soup: BeautifulSoup = BeautifulSoup(r.content, features="html.parser") | ||||||
| @@ -1123,4 +1128,4 @@ class Musify(Page): | |||||||
|  |  | ||||||
|             self.LOGGER.warning(f"The source has no audio link. Falling back to {endpoint}.") |             self.LOGGER.warning(f"The source has no audio link. Falling back to {endpoint}.") | ||||||
|  |  | ||||||
|         return self.stream_connection.stream_into(endpoint, target, raw_url=True, exclude_headers=["Host"]) |         return self.stream_connection.stream_into(endpoint, target, raw_url=True, exclude_headers=["Host"], name=desc) | ||||||
|   | |||||||
| @@ -2,8 +2,7 @@ from typing import List, Optional, Type, Tuple | |||||||
| from urllib.parse import urlparse, urlunparse, parse_qs | from urllib.parse import urlparse, urlunparse, parse_qs | ||||||
| from enum import Enum | from enum import Enum | ||||||
|  |  | ||||||
| import sponsorblock | import python_sponsorblock | ||||||
| from sponsorblock.errors import HTTPException, NotFoundException |  | ||||||
|  |  | ||||||
| from ..objects import Source, DatabaseObject, Song, Target | from ..objects import Source, DatabaseObject, Song, Target | ||||||
| from .abstract import Page | from .abstract import Page | ||||||
| @@ -63,8 +62,9 @@ class YouTube(SuperYouTube): | |||||||
|         ) |         ) | ||||||
|          |          | ||||||
|         # the connection is created here to ensure sponsorblock uses the same proxies my program does |         # the connection is created here to ensure sponsorblock uses the same proxies my program does | ||||||
|         _sponsorblock_connection: Connection = Connection(host="https://sponsor.ajay.app/") |         _sponsorblock_connection: Connection = Connection() | ||||||
|         self.sponsorblock_client = sponsorblock.Client(session=_sponsorblock_connection.session) |         self.sponsorblock = python_sponsorblock.SponsorBlock(silent=True, session=_sponsorblock_connection.session) | ||||||
|  |  | ||||||
|  |  | ||||||
|         super().__init__(*args, **kwargs) |         super().__init__(*args, **kwargs) | ||||||
|  |  | ||||||
| @@ -344,10 +344,10 @@ class YouTube(SuperYouTube): | |||||||
|          |          | ||||||
|         segments = [] |         segments = [] | ||||||
|         try: |         try: | ||||||
|             segments = self.sponsorblock_client.get_skip_segments(parsed.id) |             segments = self.sponsorblock.get_segments(parsed.id) | ||||||
|         except NotFoundException: |         except Exception as e: | ||||||
|             self.LOGGER.debug(f"No sponsor found for the video {parsed.id}.") |             self.LOGGER.warning(f"Failed to fetch sponsor segments for {parsed.id}: {e}") | ||||||
|         except HTTPException as e: |  | ||||||
|             self.LOGGER.warning(f"{e}") |  | ||||||
|  |  | ||||||
|         return [(segment.start, segment.end) for segment in segments] |         return [(segment.segment[0], segment.segment[1]) for segment in segments] | ||||||
|   | |||||||
| @@ -2,6 +2,7 @@ from typing import List, Optional | |||||||
| from enum import Enum | from enum import Enum | ||||||
|  |  | ||||||
| from ...utils.config import youtube_settings, logging_settings | from ...utils.config import youtube_settings, logging_settings | ||||||
|  | from ...utils.string_processing import clean_song_title | ||||||
| from ...objects import Source, DatabaseObject | from ...objects import Source, DatabaseObject | ||||||
| from ..abstract import Page | from ..abstract import Page | ||||||
| from ...objects import ( | from ...objects import ( | ||||||
| @@ -59,7 +60,7 @@ def parse_run_element(run_element: dict) -> Optional[DatabaseObject]: | |||||||
|      |      | ||||||
|     if element_type == PageType.SONG or (element_type == PageType.VIDEO and not youtube_settings["youtube_music_clean_data"]) or (element_type == PageType.OFFICIAL_MUSIC_VIDEO and not youtube_settings["youtube_music_clean_data"]): |     if element_type == PageType.SONG or (element_type == PageType.VIDEO and not youtube_settings["youtube_music_clean_data"]) or (element_type == PageType.OFFICIAL_MUSIC_VIDEO and not youtube_settings["youtube_music_clean_data"]): | ||||||
|         source = Source(SOURCE_PAGE, f"https://music.youtube.com/watch?v={element_id}") |         source = Source(SOURCE_PAGE, f"https://music.youtube.com/watch?v={element_id}") | ||||||
|         return Song(title=element_text, source_list=[source]) |         return Song(title=clean_song_title(element_text), source_list=[source]) | ||||||
|  |  | ||||||
|     if element_type == PageType.ARTIST or (element_type == PageType.CHANNEL and not youtube_settings["youtube_music_clean_data"]): |     if element_type == PageType.ARTIST or (element_type == PageType.CHANNEL and not youtube_settings["youtube_music_clean_data"]): | ||||||
|         source = Source(SOURCE_PAGE, f"https://music.youtube.com/channel/{element_id}") |         source = Source(SOURCE_PAGE, f"https://music.youtube.com/channel/{element_id}") | ||||||
|   | |||||||
| @@ -3,8 +3,7 @@ from urllib.parse import urlparse, urlunparse, parse_qs | |||||||
| from enum import Enum | from enum import Enum | ||||||
| import requests | import requests | ||||||
|  |  | ||||||
| import sponsorblock | import python_sponsorblock | ||||||
| from sponsorblock.errors import HTTPException, NotFoundException |  | ||||||
|  |  | ||||||
| from ...objects import Source, DatabaseObject, Song, Target | from ...objects import Source, DatabaseObject, Song, Target | ||||||
| from ..abstract import Page | from ..abstract import Page | ||||||
| @@ -143,9 +142,8 @@ class SuperYouTube(Page): | |||||||
|         ) |         ) | ||||||
|          |          | ||||||
|         # the connection is created here to ensure sponsorblock uses the same proxies my program does |         # the connection is created here to ensure sponsorblock uses the same proxies my program does | ||||||
|         _sponsorblock_connection: Connection = Connection(host="https://sponsor.ajay.app/") |         _sponsorblock_connection: Connection = Connection() | ||||||
|         self.sponsorblock_client = sponsorblock.Client(session=_sponsorblock_connection.session) |         self.sponsorblock = python_sponsorblock.SponsorBlock(silent=True, session=_sponsorblock_connection.session) | ||||||
|  |  | ||||||
|  |  | ||||||
|     def get_source_type(self, source: Source) -> Optional[Type[DatabaseObject]]: |     def get_source_type(self, source: Source) -> Optional[Type[DatabaseObject]]: | ||||||
|         _url_type = { |         _url_type = { | ||||||
| @@ -213,10 +211,10 @@ class SuperYouTube(Page): | |||||||
|          |          | ||||||
|         segments = [] |         segments = [] | ||||||
|         try: |         try: | ||||||
|             segments = self.sponsorblock_client.get_skip_segments(parsed.id) |             segments = self.sponsorblock.get_segments(parsed.id) | ||||||
|         except NotFoundException: |         except Exception as e: | ||||||
|             self.LOGGER.debug(f"No sponsor found for the video {parsed.id}.") |             self.LOGGER.warning(f"Failed to fetch sponsor segments for {parsed.id}: {e}") | ||||||
|         except HTTPException as e: |  | ||||||
|             self.LOGGER.warning(f"{e}") |  | ||||||
|  |  | ||||||
|         return [(segment.start, segment.end) for segment in segments] |         return [(segment.segment[0], segment.segment[1]) for segment in segments] | ||||||
|   | |||||||
| @@ -171,7 +171,7 @@ class YoutubeMusic(SuperYouTube): | |||||||
|     def __init__(self, *args, ydl_opts: dict = None, **kwargs): |     def __init__(self, *args, ydl_opts: dict = None, **kwargs): | ||||||
|         self.yt_music_connection: YoutubeMusicConnection = YoutubeMusicConnection( |         self.yt_music_connection: YoutubeMusicConnection = YoutubeMusicConnection( | ||||||
|             logger=self.LOGGER, |             logger=self.LOGGER, | ||||||
|             accept_language="en-US,en;q=0.5" |             accept_language="en-US,en;q=0.5", | ||||||
|         ) |         ) | ||||||
|         self.credentials: YouTubeMusicCredentials = YouTubeMusicCredentials( |         self.credentials: YouTubeMusicCredentials = YouTubeMusicCredentials( | ||||||
|             api_key=youtube_settings["youtube_music_api_key"], |             api_key=youtube_settings["youtube_music_api_key"], | ||||||
| @@ -212,7 +212,7 @@ class YoutubeMusic(SuperYouTube): | |||||||
|         search for: "innertubeApiKey" |         search for: "innertubeApiKey" | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         r = self.yt_music_connection.get("https://music.youtube.com/") |         r = self.yt_music_connection.get("https://music.youtube.com/", name="youtube_music_index.html", disable_cache=True, enable_cache_readonly=True) | ||||||
|         if r is None: |         if r is None: | ||||||
|             return |             return | ||||||
|  |  | ||||||
| @@ -232,7 +232,7 @@ class YoutubeMusic(SuperYouTube): | |||||||
|                 'set_ytc': 'true', |                 'set_ytc': 'true', | ||||||
|                 'set_apyt': 'true', |                 'set_apyt': 'true', | ||||||
|                 'set_eom': 'false' |                 'set_eom': 'false' | ||||||
|             }) |             }, disable_cache=True) | ||||||
|             if r is None: |             if r is None: | ||||||
|                 return |                 return | ||||||
|  |  | ||||||
| @@ -247,9 +247,9 @@ class YoutubeMusic(SuperYouTube): | |||||||
|             # save cookies in settings |             # save cookies in settings | ||||||
|             youtube_settings["youtube_music_consent_cookies"] = cookie_dict |             youtube_settings["youtube_music_consent_cookies"] = cookie_dict | ||||||
|         else: |         else: | ||||||
|             self.yt_music_connection.save(r, "index.html") |             self.yt_music_connection.save(r, "youtube_music_index.html", no_update_if_valid_exists=True) | ||||||
|  |  | ||||||
|         r = self.yt_music_connection.get("https://music.youtube.com/", name="index.html") |         r = self.yt_music_connection.get("https://music.youtube.com/", name="youtube_music_index.html") | ||||||
|         if r is None: |         if r is None: | ||||||
|             return |             return | ||||||
|  |  | ||||||
| @@ -374,7 +374,8 @@ class YoutubeMusic(SuperYouTube): | |||||||
|             }, |             }, | ||||||
|             headers={ |             headers={ | ||||||
|                 "Referer": get_youtube_url(path=f"/search", query=f"q={urlescaped_query}") |                 "Referer": get_youtube_url(path=f"/search", query=f"q={urlescaped_query}") | ||||||
|             } |             }, | ||||||
|  |             name=f"search_{search_query}.json" | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         if r is None: |         if r is None: | ||||||
| @@ -411,7 +412,8 @@ class YoutubeMusic(SuperYouTube): | |||||||
|             json={ |             json={ | ||||||
|                 "browseId": browse_id, |                 "browseId": browse_id, | ||||||
|                 "context": {**self.credentials.context, "adSignalsInfo": {"params": []}} |                 "context": {**self.credentials.context, "adSignalsInfo": {"params": []}} | ||||||
|             } |             }, | ||||||
|  |             name=f"fetch_artist_{browse_id}.json" | ||||||
|         ) |         ) | ||||||
|         if r is None: |         if r is None: | ||||||
|             return artist |             return artist | ||||||
| @@ -454,7 +456,8 @@ class YoutubeMusic(SuperYouTube): | |||||||
|             json={ |             json={ | ||||||
|                 "browseId": browse_id, |                 "browseId": browse_id, | ||||||
|                 "context": {**self.credentials.context, "adSignalsInfo": {"params": []}} |                 "context": {**self.credentials.context, "adSignalsInfo": {"params": []}} | ||||||
|             } |             }, | ||||||
|  |             name=f"fetch_album_{browse_id}.json" | ||||||
|         ) |         ) | ||||||
|         if r is None: |         if r is None: | ||||||
|             return album |             return album | ||||||
|   | |||||||
| @@ -2,8 +2,9 @@ from datetime import datetime | |||||||
| from pathlib import Path | from pathlib import Path | ||||||
| import json | import json | ||||||
| import logging | import logging | ||||||
|  | import inspect | ||||||
|  |  | ||||||
| from .shared import DEBUG, DEBUG_LOGGING, DEBUG_DUMP, DEBUG_TRACE, DEBUG_OBJECT_TRACE | from .shared import DEBUG, DEBUG_LOGGING, DEBUG_DUMP, DEBUG_TRACE, DEBUG_OBJECT_TRACE, DEBUG_OBJECT_TRACE_CALLSTACK | ||||||
| from .config import config, read_config, write_config | from .config import config, read_config, write_config | ||||||
| from .enums.colors import BColors | from .enums.colors import BColors | ||||||
| from .path_manager import LOCATIONS | from .path_manager import LOCATIONS | ||||||
| @@ -50,13 +51,20 @@ def trace(msg: str): | |||||||
|     if not DEBUG_TRACE: |     if not DEBUG_TRACE: | ||||||
|         return |         return | ||||||
|  |  | ||||||
|     output("trace: " + msg, BColors.OKBLUE) |     output(BColors.OKBLUE.value + "trace: " + BColors.ENDC.value + msg) | ||||||
|  |  | ||||||
|  | def request_trace(msg: str): | ||||||
|  |     if not DEBUG_TRACE: | ||||||
|  |         return | ||||||
|  |  | ||||||
|  |     output(BColors.OKGREEN.value + "request: " + BColors.ENDC.value + msg) | ||||||
|  |  | ||||||
| def object_trace(obj): | def object_trace(obj): | ||||||
|     if not DEBUG_OBJECT_TRACE: |     if not DEBUG_OBJECT_TRACE: | ||||||
|         return |         return | ||||||
|  |  | ||||||
|     output("object: " + str(obj), BColors.GREY) |     appendix =  f" called by [{' | '.join(f'{s.function} {Path(s.filename).name}:{str(s.lineno)}' for s in inspect.stack()[1:5])}]" if DEBUG_OBJECT_TRACE_CALLSTACK else "" | ||||||
|  |     output("object: " + str(obj) + appendix) | ||||||
|  |  | ||||||
|  |  | ||||||
| """ | """ | ||||||
|   | |||||||
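object_trace can now append the call chain when DEBUG_OBJECT_TRACE_CALLSTACK is set, formatting stack frames 1 to 4 from inspect.stack() as `function file:lineno`. A standalone sketch of that formatting, with invented function names, shows what the appendix looks like:

import inspect
from pathlib import Path

def call_chain(depth: int = 4) -> str:
    # skip frame 0 (this helper itself) and render the next callers
    frames = inspect.stack()[1:1 + depth]
    return " | ".join(f"{f.function} {Path(f.filename).name}:{f.lineno}" for f in frames)

def parse_song():            # hypothetical caller
    print(f"object: Song() called by [{call_chain()}]")

def fetch_album():           # hypothetical caller
    parse_song()

fetch_album()
# e.g. object: Song() called by [parse_song demo.py:10 | fetch_album demo.py:13 | <module> demo.py:15]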
| @@ -13,12 +13,13 @@ if not load_dotenv(Path(__file__).parent.parent.parent / ".env"): | |||||||
| __stage__ = os.getenv("STAGE", "prod") | __stage__ = os.getenv("STAGE", "prod") | ||||||
|  |  | ||||||
| DEBUG = (__stage__ == "dev") and True | DEBUG = (__stage__ == "dev") and True | ||||||
| DEBUG_LOGGING = DEBUG and True | DEBUG_LOGGING = DEBUG and False | ||||||
| DEBUG_TRACE = DEBUG and True | DEBUG_TRACE = DEBUG and True | ||||||
| DEBUG_OBJECT_TRACE = DEBUG and False | DEBUG_OBJECT_TRACE = DEBUG and False | ||||||
|  | DEBUG_OBJECT_TRACE_CALLSTACK = DEBUG_OBJECT_TRACE and False | ||||||
| DEBUG_YOUTUBE_INITIALIZING = DEBUG and False | DEBUG_YOUTUBE_INITIALIZING = DEBUG and False | ||||||
| DEBUG_PAGES = DEBUG and False | DEBUG_PAGES = DEBUG and False | ||||||
| DEBUG_DUMP = DEBUG and True | DEBUG_DUMP = DEBUG and False | ||||||
|  |  | ||||||
| if DEBUG: | if DEBUG: | ||||||
|     print("DEBUG ACTIVE") |     print("DEBUG ACTIVE") | ||||||
|   | |||||||
| @@ -1,6 +1,7 @@ | |||||||
| from typing import Tuple, Union | from typing import Tuple, Union, Optional | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| import string | import string | ||||||
|  | from functools import lru_cache | ||||||
|  |  | ||||||
| from transliterate.exceptions import LanguageDetectionError | from transliterate.exceptions import LanguageDetectionError | ||||||
| from transliterate import translit | from transliterate import translit | ||||||
| @@ -10,8 +11,11 @@ from pathvalidate import sanitize_filename | |||||||
| COMMON_TITLE_APPENDIX_LIST: Tuple[str, ...] = ( | COMMON_TITLE_APPENDIX_LIST: Tuple[str, ...] = ( | ||||||
|     "(official video)", |     "(official video)", | ||||||
| ) | ) | ||||||
|  | OPEN_BRACKETS = "([" | ||||||
|  | CLOSE_BRACKETS = ")]" | ||||||
|  | DISALLOWED_SUBSTRING_IN_BRACKETS = ("official", "video", "audio", "lyrics", "prod", "remix", "ft", "feat", "ft.", "feat.") | ||||||
|  |  | ||||||
|  | @lru_cache | ||||||
| def unify(string: str) -> str: | def unify(string: str) -> str: | ||||||
|     """ |     """ | ||||||
|     returns a unified str, to make comparisons easy. |     returns a unified str, to make comparisons easy. | ||||||
| @@ -30,13 +34,15 @@ def unify(string: str) -> str: | |||||||
|     return string.lower() |     return string.lower() | ||||||
|  |  | ||||||
|  |  | ||||||
| def fit_to_file_system(string: Union[str, Path]) -> Union[str, Path]: | def fit_to_file_system(string: Union[str, Path], hidden_ok: bool = False) -> Union[str, Path]: | ||||||
|     def fit_string(string: str) -> str: |     def fit_string(string: str) -> str: | ||||||
|  |         nonlocal hidden_ok | ||||||
|  |          | ||||||
|         if string == "/": |         if string == "/": | ||||||
|             return "/" |             return "/" | ||||||
|         string = string.strip() |         string = string.strip() | ||||||
|  |  | ||||||
|         while string[0] == ".": |         while string[0] == "." and not hidden_ok: | ||||||
|             if len(string) == 0: |             if len(string) == 0: | ||||||
|                 return string |                 return string | ||||||
|  |  | ||||||
| @@ -52,7 +58,8 @@ def fit_to_file_system(string: Union[str, Path]) -> Union[str, Path]: | |||||||
|         return fit_string(string) |         return fit_string(string) | ||||||
|  |  | ||||||
|  |  | ||||||
| def clean_song_title(raw_song_title: str, artist_name: str) -> str: | @lru_cache(maxsize=128) | ||||||
|  | def clean_song_title(raw_song_title: str, artist_name: Optional[str] = None) -> str: | ||||||
|     """ |     """ | ||||||
|     This function cleans up common naming "conventions" found in unclean song titles, such as the titles of YouTube videos |     This function cleans up common naming "conventions" found in unclean song titles, such as the titles of YouTube videos | ||||||
|      |      | ||||||
| @@ -64,13 +71,39 @@ def clean_song_title(raw_song_title: str, artist_name: str) -> str: | |||||||
|     - `song (prod. some producer)` |     - `song (prod. some producer)` | ||||||
|     """ |     """ | ||||||
|     raw_song_title = raw_song_title.strip() |     raw_song_title = raw_song_title.strip() | ||||||
|     artist_name = artist_name.strip() |  | ||||||
|  |  | ||||||
|     # Clean official Video appendix |     # Clean official Video appendix | ||||||
|     for dirty_appendix in COMMON_TITLE_APPENDIX_LIST: |     for dirty_appendix in COMMON_TITLE_APPENDIX_LIST: | ||||||
|         if raw_song_title.lower().endswith(dirty_appendix): |         if raw_song_title.lower().endswith(dirty_appendix): | ||||||
|             raw_song_title = raw_song_title[:-len(dirty_appendix)].strip() |             raw_song_title = raw_song_title[:-len(dirty_appendix)].strip() | ||||||
|  |  | ||||||
|  |     # remove brackets and their content if they contain disallowed substrings | ||||||
|  |     for open_bracket, close_bracket in zip(OPEN_BRACKETS, CLOSE_BRACKETS): | ||||||
|  |         if open_bracket not in raw_song_title or close_bracket not in raw_song_title: | ||||||
|  |             continue | ||||||
|  |          | ||||||
|  |         start = 0 | ||||||
|  |  | ||||||
|  |         while True: | ||||||
|  |             try: | ||||||
|  |                 open_bracket_index = raw_song_title.index(open_bracket, start) | ||||||
|  |             except ValueError: | ||||||
|  |                 break | ||||||
|  |             try: | ||||||
|  |                 close_bracket_index = raw_song_title.index(close_bracket, open_bracket_index + 1) | ||||||
|  |             except ValueError: | ||||||
|  |                 break | ||||||
|  |  | ||||||
|  |             substring = raw_song_title[open_bracket_index + 1:close_bracket_index] | ||||||
|  |             if any(disallowed_substring in substring.lower() for disallowed_substring in DISALLOWED_SUBSTRING_IN_BRACKETS): | ||||||
|  |                 raw_song_title = raw_song_title[:open_bracket_index] + raw_song_title[close_bracket_index + 1:] | ||||||
|  |             else: | ||||||
|  |                 start = close_bracket_index + 1 | ||||||
|  |  | ||||||
|  |     # everything that requires the artist name | ||||||
|  |     if artist_name is not None: | ||||||
|  |         artist_name = artist_name.strip() | ||||||
|  |  | ||||||
|         # Remove artist from the start of the title |         # Remove artist from the start of the title | ||||||
|         if raw_song_title.lower().startswith(artist_name.lower()): |         if raw_song_title.lower().startswith(artist_name.lower()): | ||||||
|             raw_song_title = raw_song_title[len(artist_name):].strip() |             raw_song_title = raw_song_title[len(artist_name):].strip() | ||||||
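The bracket pass added to clean_song_title scans every (...) and [...] group, drops a group when its content contains one of the disallowed substrings (official, video, lyrics, prod, ...), and leaves other bracketed text alone. A trimmed-down, standalone sketch of the same scan, with the constants abridged from the hunk above and a final whitespace collapse added here so the expected results read cleanly:

OPEN_BRACKETS = "(["
CLOSE_BRACKETS = ")]"
DISALLOWED = ("official", "video", "audio", "lyrics", "prod", "remix", "feat")

def scrub_brackets(title: str) -> str:
    for open_b, close_b in zip(OPEN_BRACKETS, CLOSE_BRACKETS):
        start = 0
        while True:
            open_i = title.find(open_b, start)
            if open_i == -1:
                break
            close_i = title.find(close_b, open_i + 1)
            if close_i == -1:
                break
            content = title[open_i + 1:close_i].lower()
            if any(word in content for word in DISALLOWED):
                title = title[:open_i] + title[close_i + 1:]   # drop the whole group
            else:
                start = close_i + 1                            # keep it, keep scanning
    return " ".join(title.split())

assert scrub_brackets("Numb (Official Video) [Lyrics]") == "Numb"
assert scrub_brackets("One More Light (Acoustic)") == "One More Light (Acoustic)"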
| @@ -146,3 +179,8 @@ def match_length(length_1: int | None, length_2: int | None) -> bool: | |||||||
|         return True |         return True | ||||||
|     return abs(length_1 - length_2) <= ALLOWED_LENGTH_DISTANCE |     return abs(length_1 - length_2) <= ALLOWED_LENGTH_DISTANCE | ||||||
|  |  | ||||||
|  | def shorten_display_url(url: str, max_length: int = 150, chars_at_end: int = 4, shorten_string: str = "[...]") -> str: | ||||||
|  |     if len(url) <= max_length + chars_at_end + len(shorten_string): | ||||||
|  |         return url | ||||||
|  |      | ||||||
|  |     return url[:max_length] + shorten_string + url[-chars_at_end:] | ||||||
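With its defaults, the new shorten_display_url leaves anything up to 159 characters (150 + 4 + len("[...]")) untouched and otherwise keeps the first 150 and the last 4 characters, so logged URLs stay readable. For example (the URL is made up):

long_url = "https://example.com/stream/" + "a" * 300          # well over the 159-character threshold
short = shorten_display_url(long_url)
assert len(short) == 150 + len("[...]") + 4                    # 159 characters
assert short.endswith("[...]aaaa")
assert shorten_display_url("https://example.com/x") == "https://example.com/x"  # short URLs pass through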
|   | |||||||
| @@ -56,6 +56,7 @@ dependencies = [ | |||||||
|  |  | ||||||
|     "rich~=13.7.1", |     "rich~=13.7.1", | ||||||
|     "mistune~=3.0.2", |     "mistune~=3.0.2", | ||||||
|  |     "markdownify~=0.12.1", | ||||||
|     "html2markdown~=0.1.7", |     "html2markdown~=0.1.7", | ||||||
|     "jellyfish~=0.9.0", |     "jellyfish~=0.9.0", | ||||||
|     "transliterate~=1.10.2", |     "transliterate~=1.10.2", | ||||||
| @@ -68,7 +69,7 @@ dependencies = [ | |||||||
|     "toml~=0.10.2", |     "toml~=0.10.2", | ||||||
|     "typing_extensions~=4.7.1", |     "typing_extensions~=4.7.1", | ||||||
|  |  | ||||||
|     "sponsorblock~=0.1.3", |     "python-sponsorblock~=0.0.0", | ||||||
|     "youtube_dl", |     "youtube_dl", | ||||||
| ] | ] | ||||||
| dynamic = [ | dynamic = [ | ||||||
|   | |||||||
| @@ -1,25 +0,0 @@ | |||||||
| requests~=2.31.0 |  | ||||||
| mutagen~=1.46.0 |  | ||||||
| musicbrainzngs~=0.7.1 |  | ||||||
| jellyfish~=0.9.0 |  | ||||||
| beautifulsoup4~=4.11.1 |  | ||||||
| pycountry~=24.0.1 |  | ||||||
| python-dateutil~=2.8.2 |  | ||||||
| pandoc~=2.3 |  | ||||||
| SQLAlchemy~=2.0.7 |  | ||||||
| setuptools~=68.2.0 |  | ||||||
| tqdm~=4.65.0 |  | ||||||
| ffmpeg-python~=0.2.0 |  | ||||||
| platformdirs~=4.2.0 |  | ||||||
| transliterate~=1.10.2 |  | ||||||
| sponsorblock~=0.1.3 |  | ||||||
| regex~=2022.9.13 |  | ||||||
| pyffmpeg~=2.4.2.18 |  | ||||||
| ffmpeg-progress-yield~=0.7.8 |  | ||||||
| pathvalidate~=2.5.2 |  | ||||||
| guppy3~=3.1.3 |  | ||||||
| toml~=0.10.2 |  | ||||||
| typing_extensions~=4.7.1 |  | ||||||
| responses~=0.24.1 |  | ||||||
| youtube_dl |  | ||||||
| merge_args~=0.1.5 |  | ||||||
| @@ -70,7 +70,49 @@ class TestCollection(unittest.TestCase): | |||||||
|         self.assertTrue(a.name == b.name == c.name == d.name == "artist") |         self.assertTrue(a.name == b.name == c.name == d.name == "artist") | ||||||
|         self.assertTrue(a.country == b.country == c.country == d.country) |         self.assertTrue(a.country == b.country == c.country == d.country) | ||||||
|  |  | ||||||
|     """ |     def test_artist_artist_relation(self): | ||||||
|  |         artist = Artist( | ||||||
|  |             name="artist", | ||||||
|  |             main_album_list=[ | ||||||
|  |                 Album( | ||||||
|  |                     title="album", | ||||||
|  |                     song_list=[ | ||||||
|  |                         Song(title="song"), | ||||||
|  |                     ], | ||||||
|  |                     artist_list=[ | ||||||
|  |                         Artist(name="artist"), | ||||||
|  |                     ] | ||||||
|  |                 ) | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         self.assertTrue(artist.id == artist.main_album_collection[0].song_collection[0].main_artist_collection[0].id) | ||||||
|  |  | ||||||
|  |     def test_artist_collection_sync(self): | ||||||
|  |         album_1 = Album( | ||||||
|  |             title="album", | ||||||
|  |             song_list=[ | ||||||
|  |                 Song(title="song", main_artist_list=[Artist(name="artist")]), | ||||||
|  |             ], | ||||||
|  |             artist_list=[ | ||||||
|  |                 Artist(name="artist"), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         album_2 = Album( | ||||||
|  |             title="album", | ||||||
|  |             song_list=[ | ||||||
|  |                 Song(title="song", main_artist_list=[Artist(name="artist")]), | ||||||
|  |             ], | ||||||
|  |             artist_list=[ | ||||||
|  |                 Artist(name="artist"), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         album_1.merge(album_2) | ||||||
|  |  | ||||||
|  |         self.assertTrue(id(album_1.artist_collection) == id(album_1.song_collection[0].main_artist_collection)) | ||||||
|  |  | ||||||
|     def test_song_artist_relations(self): |     def test_song_artist_relations(self): | ||||||
|         a = self.complicated_object() |         a = self.complicated_object() | ||||||
|         b = a.main_album_collection[0].song_collection[0].main_artist_collection[0] |         b = a.main_album_collection[0].song_collection[0].main_artist_collection[0] | ||||||
| @@ -80,7 +122,6 @@ class TestCollection(unittest.TestCase): | |||||||
|         self.assertTrue(a.id == b.id == c.id == d.id) |         self.assertTrue(a.id == b.id == c.id == d.id) | ||||||
|         self.assertTrue(a.name == b.name == c.name == d.name == "artist") |         self.assertTrue(a.name == b.name == c.name == d.name == "artist") | ||||||
|         self.assertTrue(a.country == b.country == c.country == d.country) |         self.assertTrue(a.country == b.country == c.country == d.country) | ||||||
|     """ |  | ||||||
|  |  | ||||||
| if __name__ == "__main__": | if __name__ == "__main__": | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||